diff --git a/akka-http-core/src/main/scala/akka/parboiled2/Base64Parsing.scala b/akka-http-core/src/main/scala/akka/parboiled2/Base64Parsing.scala
new file mode 100644
index 0000000000..15083e649c
--- /dev/null
+++ b/akka-http-core/src/main/scala/akka/parboiled2/Base64Parsing.scala
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import akka.parboiled2.util.Base64
+
+/**
+ * Rules for parsing Base-64 encoded strings.
+ */
+trait Base64Parsing { this: Parser ⇒
+ import Base64Parsing._
+
+ /**
+ * Parses an RFC 2045-encoded string and decodes it onto the value stack.
+ */
+ def rfc2045String: Rule1[Array[Byte]] = base64StringOrBlock(rfc2045Alphabet, rfc2045StringDecoder)
+
+ /**
+ * Parses an RFC 2045-encoded string potentially containing newlines and decodes it onto the value stack.
+ */
+ def rfc2045Block: Rule1[Array[Byte]] = base64StringOrBlock(rfc2045Alphabet, rfc2045BlockDecoder)
+
+ /**
+ * Parses an akka.parboiled2.util.Base64.custom()-encoded string and decodes it onto the value stack.
+ */
+ def base64CustomString: Rule1[Array[Byte]] = base64StringOrBlock(customAlphabet, customStringDecoder)
+
+ /**
+ * Parses an akka.parboiled2.util.Base64.custom()-encoded string potentially containing newlines
+ * and decodes it onto the value stack.
+ */
+ def base64CustomBlock: Rule1[Array[Byte]] = base64StringOrBlock(customAlphabet, customBlockDecoder)
+
+ /**
+ * Parses a BASE64-encoded string with the given alphabet and decodes it onto the value
+ * stack using the given decoder.
+ */
+ def base64StringOrBlock(alphabet: CharPredicate, decoder: Decoder): Rule1[Array[Byte]] = {
+ val start = cursor
+ rule {
+ oneOrMore(alphabet) ~ run {
+ decoder(input.sliceCharArray(start, cursor)) match {
+ case null ⇒ MISMATCH
+ case bytes ⇒ push(bytes)
+ }
+ }
+ }
+ }
+}
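+
+/*
+ * Illustrative usage sketch (not part of the upstream parboiled2 sources): a minimal parser
+ * mixing in `Base64Parsing`. The parser and rule names below are made up for demonstration
+ * purposes only.
+ *
+ * {{{
+ * import akka.parboiled2._
+ *
+ * class Base64Example(val input: ParserInput) extends Parser with Base64Parsing {
+ *   def base64Line: Rule1[Array[Byte]] = rule { rfc2045String ~ EOI }
+ * }
+ *
+ * new Base64Example("SGVsbG8=").base64Line.run() // Success(...) wrapping the bytes of "Hello"
+ * }}}
+ */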
+
+object Base64Parsing {
+ type Decoder = Array[Char] ⇒ Array[Byte]
+
+ val rfc2045Alphabet = CharPredicate(Base64.rfc2045().getAlphabet).asMaskBased
+ val customAlphabet = CharPredicate(Base64.custom().getAlphabet).asMaskBased
+
+ val rfc2045StringDecoder: Decoder = decodeString(Base64.rfc2045())
+ val customStringDecoder: Decoder = decodeString(Base64.custom())
+
+ val rfc2045BlockDecoder: Decoder = decodeBlock(Base64.rfc2045())
+ val customBlockDecoder: Decoder = decodeBlock(Base64.custom())
+
+ def decodeString(codec: Base64)(chars: Array[Char]): Array[Byte] = codec.decodeFast(chars)
+ def decodeBlock(codec: Base64)(chars: Array[Char]): Array[Byte] = codec.decode(chars)
+}
\ No newline at end of file
diff --git a/akka-http-core/src/main/scala/akka/parboiled2/StringBuilding.scala b/akka-http-core/src/main/scala/akka/parboiled2/StringBuilding.scala
new file mode 100644
index 0000000000..821a182226
--- /dev/null
+++ b/akka-http-core/src/main/scala/akka/parboiled2/StringBuilding.scala
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+/**
+ * For certain high-performance use cases it is better to construct the Strings that the
+ * parser is to produce/extract from the input in a char-by-char fashion.
+ *
+ * Mixing this trait into your parser gives you a simple facility for supporting this.
+ */
+trait StringBuilding { this: Parser ⇒
+ protected val sb = new java.lang.StringBuilder
+
+ def clearSB(): Rule0 = rule { run(sb.setLength(0)) }
+
+ def appendSB(): Rule0 = rule { run(sb.append(lastChar)) }
+
+ def appendSB(offset: Int): Rule0 = rule { run(sb.append(charAt(offset))) }
+
+ def appendSB(c: Char): Rule0 = rule { run(sb.append(c)) }
+
+ def appendSB(s: String): Rule0 = rule { run(sb.append(s)) }
+
+ def prependSB(): Rule0 = rule { run(doPrepend(lastChar)) }
+
+ def prependSB(offset: Int): Rule0 = rule { run(doPrepend(charAt(offset))) }
+
+ def prependSB(c: Char): Rule0 = rule { run(doPrepend(c)) }
+
+ def prependSB(s: String): Rule0 = rule { run(doPrepend(s)) }
+
+ def setSB(s: String): Rule0 = rule { run(doSet(s)) }
+
+ private def doPrepend(c: Char): Unit = {
+ val saved = sb.toString
+ sb.setLength(0)
+ sb.append(c)
+ sb.append(saved)
+ }
+
+ private def doPrepend(s: String): Unit = {
+ val saved = sb.toString
+ sb.setLength(0)
+ sb.append(s)
+ sb.append(saved)
+ }
+
+ private def doSet(s: String): Unit = {
+ sb.setLength(0)
+ sb.append(s)
+ }
+}
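+
+/*
+ * Illustrative usage sketch (not part of the upstream parboiled2 sources): collecting the
+ * characters of a quoted string via the `sb` StringBuilder. The parser and rule names are
+ * made up for demonstration purposes only.
+ *
+ * {{{
+ * import akka.parboiled2._
+ *
+ * class QuotedStringParser(val input: ParserInput) extends Parser with StringBuilding {
+ *   def quoted: Rule1[String] = rule {
+ *     '"' ~ clearSB() ~ zeroOrMore(!'"' ~ ANY ~ appendSB()) ~ '"' ~ push(sb.toString)
+ *   }
+ * }
+ *
+ * new QuotedStringParser("\"hello\"").quoted.run() // Success("hello")
+ * }}}
+ */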
diff --git a/akka-parsing/src/main/java/akka/parboiled2/util/Base64.java b/akka-parsing/src/main/java/akka/parboiled2/util/Base64.java
new file mode 100644
index 0000000000..5ec91e12f1
--- /dev/null
+++ b/akka-parsing/src/main/java/akka/parboiled2/util/Base64.java
@@ -0,0 +1,673 @@
+/**
+ * A very fast and memory efficient class to encode and decode to and from BASE64 in full accordance
+ * with RFC 2045.
+ * On Windows XP sp1 with 1.4.2_04 and later ;), this encoder and decoder are about 10 times faster
+ * on small arrays (10 - 1000 bytes) and 2-3 times as fast on larger arrays (10000 - 1000000 bytes)
+ * compared to sun.misc.Encoder()/Decoder().
+ *
+ * On byte arrays the encoder is about 20% faster than Jakarta Commons Base64 Codec for encode and
+ * about 50% faster for decoding large arrays. This implementation is about twice as fast on very small
+ * arrays (< 30 bytes). If source/destination is a String this
+ * version is about three times as fast due to the fact that the Commons Codec result has to be recoded
+ * to a String from byte[], which is very expensive.
+ *
+ * This encode/decode algorithm doesn't create any temporary arrays, as many other codecs do; it only
+ * allocates the resulting array. This produces less garbage and makes it possible to handle arrays twice
+ * as large as algorithms that create a temporary array (e.g. Jakarta Commons Codec). It is unknown
+ * whether Sun's sun.misc.Encoder()/Decoder() produce temporary arrays, but since their performance
+ * is quite low they probably do.
+ *
+ * The encoder produces the same output as the Sun one except that Sun's encoder appends
+ * a trailing line separator if the last character isn't a pad. It is unclear why, but it only adds to the
+ * length and is probably a side effect. Both are in conformance with RFC 2045 though.
+ * The Commons codec seems to always add a trailing line separator.
+ *
+ * Note!
+ * The encode/decode method pairs (types) come in three versions with the exact same algorithm and
+ * thus a lot of code redundancy. This is to not create any temporary arrays for transcoding to/from different
+ * format types. The methods not used can simply be commented out.
+ *
+ * There is also a "fast" version of all decode methods that works the same way as the normal ones, but
+ * has a few demands on the input to be decoded. Normally these fast versions should be used if the source
+ * of the input is known and it hasn't been tampered with.
+ *
+ * If you find the code useful or you find a bug, please send me a note at base64 @ miginfocom . com.
+ *
+ * Licence (BSD):
+ * ==============
+ *
+ * Copyright (c) 2004, Mikael Grev, MiG InfoCom AB. (base64 @ miginfocom . com)
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification,
+ * are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright notice, this list
+ * of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice, this
+ * list of conditions and the following disclaimer in the documentation and/or other
+ * materials provided with the distribution.
+ * Neither the name of the MiG InfoCom AB nor the names of its contributors may be
+ * used to endorse or promote products derived from this software without specific
+ * prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+ * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
+ * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
+ * OF SUCH DAMAGE.
+ *
+ * @version 2.2
+ * @author Mikael Grev
+ * Date: 2004-aug-02
+ * Time: 11:31:11
+ *
+ * Adapted in 2009 by Mathias Doenitz.
+ */
+
+package akka.parboiled2.util;
+
+import java.util.Arrays;
+
+@SuppressWarnings({"UnnecessaryParentheses"})
+public class Base64 {
+
+ // -------- FIELDS -------------------------------------------------------------------------------------------------
+
+ private static Base64 RFC2045;
+ private static Base64 CUSTOM;
+
+ private final char[] CA;
+ private final int[] IA;
+ private final char fillChar;
+
+ // -------- STATIC METHODS -----------------------------------------------------------------------------------------
+
+ public static Base64 custom() {
+ if (CUSTOM == null) {
+ CUSTOM = new Base64("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+-_");
+ }
+ return CUSTOM;
+ }
+
+ public static Base64 rfc2045() {
+ if (RFC2045 == null) {
+ RFC2045 = new Base64("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=");
+ }
+ return RFC2045;
+ }
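+
+ /*
+ * Illustrative usage sketch (not part of the upstream sources); variable names are made up
+ * for demonstration purposes only:
+ *
+ * byte[] bytes = Base64.rfc2045().decode("SGVsbG8="); // lenient decode, null if definitely corrupted
+ * byte[] fast = Base64.rfc2045().decodeFast("SGVsbG8=".toCharArray()); // faster, well-formed input only
+ * String encoded = Base64.rfc2045().encodeToString(bytes, false); // "SGVsbG8=", no line separators
+ */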
+
+ // -------- CONSTRUCTORS -------------------------------------------------------------------------------------------
+
+ public Base64(String alphabet) {
+ if (alphabet == null || alphabet.length() != 65) throw new IllegalArgumentException();
+ CA = alphabet.substring(0, 64).toCharArray();
+ IA = new int[256];
+ Arrays.fill(IA, -1);
+ for (int i = 0, iS = CA.length; i < iS; i++) {
+ IA[CA[i]] = i;
+ }
+ fillChar = alphabet.charAt(64);
+ IA[fillChar] = 0;
+ }
+
+ // -------- OTHER METHODS ------------------------------------------------------------------------------------------
+
+ /**
+ * Decodes a BASE64 encoded char array. All illegal characters are ignored, so the method can handle
+ * arrays both with and without line separators.
+ *
+ * @param sArr The source array. null or length 0 will return an empty array.
+ * @return The decoded array of bytes. May be of length 0. Will be null if the number of legal characters
+ * (including '=') isn't divisible by 4 (i.e. the input is definitely corrupted).
+ */
+ public final byte[] decode(char[] sArr) {
+ // Check special case
+ int sLen = sArr != null ? sArr.length : 0;
+ if (sLen == 0) {
+ return new byte[0];
+ }
+
+ // Count illegal characters (including '\r', '\n') to know what size the returned array will be,
+ // so we don't have to reallocate & copy it later.
+ int sepCnt = 0; // Number of separator characters. (Actually illegal characters, but that's a bonus...)
+ // If input is "pure" base64 (i.e. no line separators or illegal chars) this loop can be commented out.
+ for (int i = 0; i < sLen; i++) {
+ if (IA[sArr[i]] < 0) {
+ sepCnt++;
+ }
+ }
+
+ // Check that the number of legal chars (including '=') is evenly divisible by 4, as specified in RFC 2045.
+ if ((sLen - sepCnt) % 4 != 0) {
+ return null;
+ }
+
+ int pad = 0;
+ for (int i = sLen; i > 1 && IA[sArr[--i]] <= 0;) {
+ if (sArr[i] == fillChar) {
+ pad++;
+ }
+ }
+
+ int len = ((sLen - sepCnt) * 6 >> 3) - pad;
+
+ byte[] dArr = new byte[len]; // Preallocate byte[] of exact length
+
+ for (int s = 0, d = 0; d < len;) {
+ // Assemble three bytes into an int from four "valid" characters.
+ int i = 0;
+ for (int j = 0; j < 4; j++) { // j only increased if a valid char was found.
+ int c = IA[sArr[s++]];
+ if (c >= 0) {
+ i |= c << (18 - j * 6);
+ } else {
+ j--;
+ }
+ }
+ // Add the bytes
+ dArr[d++] = (byte) (i >> 16);
+ if (d < len) {
+ dArr[d++] = (byte) (i >> 8);
+ if (d < len) {
+ dArr[d++] = (byte) i;
+ }
+ }
+ }
+ return dArr;
+ }
+
+ /**
+ * Decodes a BASE64 encoded byte array. All illegal characters are ignored, so the method can handle
+ * arrays both with and without line separators.
+ *
+ * @param sArr The source array. Length 0 will return an empty array. null will throw an exception.
+ * @return The decoded array of bytes. May be of length 0. Will be null if the number of legal characters
+ * (including '=') isn't divisible by 4 (i.e. the input is definitely corrupted).
+ */
+ public final byte[] decode(byte[] sArr) {
+ // Check special case
+ int sLen = sArr.length;
+
+ // Count illegal characters (including '\r', '\n') to know what size the returned array will be,
+ // so we don't have to reallocate & copy it later.
+ int sepCnt = 0; // Number of separator characters. (Actually illegal characters, but that's a bonus...)
+ // If input is "pure" base64 (i.e. no line separators or illegal chars) this loop can be commented out.
+ for (int i = 0; i < sLen; i++) {
+ if (IA[sArr[i] & 0xff] < 0) {
+ sepCnt++;
+ }
+ }
+
+ // Check that the number of legal chars (including '=') is evenly divisible by 4, as specified in RFC 2045.
+ if ((sLen - sepCnt) % 4 != 0) {
+ return null;
+ }
+
+ int pad = 0;
+ for (int i = sLen; i > 1 && IA[sArr[--i] & 0xff] <= 0;) {
+ if (sArr[i] == fillChar) {
+ pad++;
+ }
+ }
+
+ int len = ((sLen - sepCnt) * 6 >> 3) - pad;
+
+ byte[] dArr = new byte[len]; // Preallocate byte[] of exact length
+
+ for (int s = 0, d = 0; d < len;) {
+ // Assemble three bytes into an int from four "valid" characters.
+ int i = 0;
+ for (int j = 0; j < 4; j++) { // j only increased if a valid char was found.
+ int c = IA[sArr[s++] & 0xff];
+ if (c >= 0) {
+ i |= c << (18 - j * 6);
+ } else {
+ j--;
+ }
+ }
+
+ // Add the bytes
+ dArr[d++] = (byte) (i >> 16);
+ if (d < len) {
+ dArr[d++] = (byte) (i >> 8);
+ if (d < len) {
+ dArr[d++] = (byte) i;
+ }
+ }
+ }
+
+ return dArr;
+ }
+
+ /**
+ * Decodes a BASE64 encoded String. All illegal characters are ignored, so the method can handle
+ * strings both with and without line separators.
+ * Note! Calling decode(str.toCharArray()) instead can be up to about 2x as fast, but it creates a
+ * temporary array. This version uses str.charAt(i) to iterate over the string.
+ *
+ * @param str The source string. null or length 0 will return an empty array.
+ * @return The decoded array of bytes. May be of length 0. Will be null if the number of legal characters
+ * (including '=') isn't divisible by 4 (i.e. the input is definitely corrupted).
+ */
+ public final byte[] decode(String str) {
+ // Check special case
+ int sLen = str != null ? str.length() : 0;
+ if (sLen == 0) {
+ return new byte[0];
+ }
+
+ // Count illegal characters (including '\r', '\n') to know what size the returned array will be,
+ // so we don't have to reallocate & copy it later.
+ int sepCnt = 0; // Number of separator characters. (Actually illegal characters, but that's a bonus...)
+ // If input is "pure" base64 (i.e. no line separators or illegal chars) this loop can be commented out.
+ for (int i = 0; i < sLen; i++) {
+ if (IA[str.charAt(i)] < 0) {
+ sepCnt++;
+ }
+ }
+
+ // Check that the number of legal chars (including '=') is evenly divisible by 4, as specified in RFC 2045.
+ if ((sLen - sepCnt) % 4 != 0) {
+ return null;
+ }
+
+ // Count '=' at end
+ int pad = 0;
+ for (int i = sLen; i > 1 && IA[str.charAt(--i)] <= 0;) {
+ if (str.charAt(i) == fillChar) {
+ pad++;
+ }
+ }
+
+ int len = ((sLen - sepCnt) * 6 >> 3) - pad;
+
+ byte[] dArr = new byte[len]; // Preallocate byte[] of exact length
+
+ for (int s = 0, d = 0; d < len;) {
+ // Assemble three bytes into an int from four "valid" characters.
+ int i = 0;
+ for (int j = 0; j < 4; j++) { // j only increased if a valid char was found.
+ int c = IA[str.charAt(s++)];
+ if (c >= 0) {
+ i |= c << (18 - j * 6);
+ } else {
+ j--;
+ }
+ }
+ // Add the bytes
+ dArr[d++] = (byte) (i >> 16);
+ if (d < len) {
+ dArr[d++] = (byte) (i >> 8);
+ if (d < len) {
+ dArr[d++] = (byte) i;
+ }
+ }
+ }
+ return dArr;
+ }
+
+ /**
+ * Decodes a BASE64 encoded char array that is known to be reasonably well formatted. The method is about twice as
+ * fast as {@link #decode(char[])}. The preconditions are:
+ * + The array must have a line length of 76 chars OR no line separators at all (one line).
+ * + Line separator must be "\r\n", as specified in RFC 2045
+ * + The array must not contain illegal characters within the encoded string
+ * + The array CAN have illegal characters at the beginning and end, those will be dealt with appropriately.
+ *
+ * @param sArr The source array. Length 0 will return an empty array. null will throw an exception.
+ * @return The decoded array of bytes. May be of length 0.
+ */
+ public final byte[] decodeFast(char[] sArr) {
+ // Check special case
+ int sLen = sArr.length;
+ if (sLen == 0) {
+ return new byte[0];
+ }
+
+ int sIx = 0, eIx = sLen - 1; // Start and end index after trimming.
+
+ // Trim illegal chars from start
+ while (sIx < eIx && IA[sArr[sIx]] < 0) {
+ sIx++;
+ }
+
+ // Trim illegal chars from end
+ while (eIx > 0 && IA[sArr[eIx]] < 0) {
+ eIx--;
+ }
+
+ // get the padding count (=) (0, 1 or 2)
+ int pad = sArr[eIx] == fillChar ? (sArr[eIx - 1] == fillChar ? 2 : 1) : 0; // Count '=' at end.
+ int cCnt = eIx - sIx + 1; // Content count including possible separators
+ int sepCnt = sLen > 76 ? (sArr[76] == '\r' ? cCnt / 78 : 0) << 1 : 0;
+
+ int len = ((cCnt - sepCnt) * 6 >> 3) - pad; // The number of decoded bytes
+ byte[] dArr = new byte[len]; // Preallocate byte[] of exact length
+
+ // Decode all but the last 0 - 2 bytes.
+ int d = 0;
+ for (int cc = 0, eLen = (len / 3) * 3; d < eLen;) {
+ // Assemble three bytes into an int from four "valid" characters.
+ int i = IA[sArr[sIx++]] << 18 | IA[sArr[sIx++]] << 12 | IA[sArr[sIx++]] << 6 | IA[sArr[sIx++]];
+
+ // Add the bytes
+ dArr[d++] = (byte) (i >> 16);
+ dArr[d++] = (byte) (i >> 8);
+ dArr[d++] = (byte) i;
+
+ // If line separator, jump over it.
+ if (sepCnt > 0 && ++cc == 19) {
+ sIx += 2;
+ cc = 0;
+ }
+ }
+
+ if (d < len) {
+ // Decode last 1-3 bytes (incl '=') into 1-3 bytes
+ int i = 0;
+ for (int j = 0; sIx <= eIx - pad; j++) {
+ i |= IA[sArr[sIx++]] << (18 - j * 6);
+ }
+
+ for (int r = 16; d < len; r -= 8) {
+ dArr[d++] = (byte) (i >> r);
+ }
+ }
+
+ return dArr;
+ }
+
+ /**
+ * Decodes a BASE64 encoded byte array that is known to be reasonably well formatted. The method is about twice as
+ * fast as {@link #decode(byte[])}. The preconditions are:
+ * + The array must have a line length of 76 chars OR no line separators at all (one line).
+ * + Line separator must be "\r\n", as specified in RFC 2045
+ * + The array must not contain illegal characters within the encoded string
+ * + The array CAN have illegal characters at the beginning and end, those will be dealt with appropriately.
+ *
+ * @param sArr The source array. Length 0 will return an empty array. null will throw an exception.
+ * @return The decoded array of bytes. May be of length 0.
+ */
+ public final byte[] decodeFast(byte[] sArr) {
+ // Check special case
+ int sLen = sArr.length;
+ if (sLen == 0) {
+ return new byte[0];
+ }
+
+ int sIx = 0, eIx = sLen - 1; // Start and end index after trimming.
+
+ // Trim illegal chars from start
+ while (sIx < eIx && IA[sArr[sIx] & 0xff] < 0) {
+ sIx++;
+ }
+
+ // Trim illegal chars from end
+ while (eIx > 0 && IA[sArr[eIx] & 0xff] < 0) {
+ eIx--;
+ }
+
+ // get the padding count (=) (0, 1 or 2)
+ int pad = sArr[eIx] == fillChar ? (sArr[eIx - 1] == fillChar ? 2 : 1) : 0; // Count '=' at end.
+ int cCnt = eIx - sIx + 1; // Content count including possible separators
+ int sepCnt = sLen > 76 ? (sArr[76] == '\r' ? cCnt / 78 : 0) << 1 : 0;
+
+ int len = ((cCnt - sepCnt) * 6 >> 3) - pad; // The number of decoded bytes
+ byte[] dArr = new byte[len]; // Preallocate byte[] of exact length
+
+ // Decode all but the last 0 - 2 bytes.
+ int d = 0;
+ for (int cc = 0, eLen = (len / 3) * 3; d < eLen;) {
+ // Assemble three bytes into an int from four "valid" characters.
+ int i = IA[sArr[sIx++]] << 18 | IA[sArr[sIx++]] << 12 | IA[sArr[sIx++]] << 6 | IA[sArr[sIx++]];
+
+ // Add the bytes
+ dArr[d++] = (byte) (i >> 16);
+ dArr[d++] = (byte) (i >> 8);
+ dArr[d++] = (byte) i;
+
+ // If line separator, jump over it.
+ if (sepCnt > 0 && ++cc == 19) {
+ sIx += 2;
+ cc = 0;
+ }
+ }
+
+ if (d < len) {
+ // Decode last 1-3 bytes (incl '=') into 1-3 bytes
+ int i = 0;
+ for (int j = 0; sIx <= eIx - pad; j++) {
+ i |= IA[sArr[sIx++]] << (18 - j * 6);
+ }
+
+ for (int r = 16; d < len; r -= 8) {
+ dArr[d++] = (byte) (i >> r);
+ }
+ }
+
+ return dArr;
+ }
+
+ /**
+ * Decodes a BASE64 encoded string that is known to be reasonably well formatted. The method is about twice as
+ * fast as {@link #decode(String)}. The preconditions are:
+ * + The array must have a line length of 76 chars OR no line separators at all (one line).
+ * + Line separator must be "\r\n", as specified in RFC 2045
+ * + The array must not contain illegal characters within the encoded string
+ * + The array CAN have illegal characters at the beginning and end, those will be dealt with appropriately.
+ *
+ * @param s The source string. Length 0 will return an empty array. null will throw an exception.
+ * @return The decoded array of bytes. May be of length 0.
+ */
+ public final byte[] decodeFast(String s) {
+ // Check special case
+ int sLen = s.length();
+ if (sLen == 0) {
+ return new byte[0];
+ }
+
+ int sIx = 0, eIx = sLen - 1; // Start and end index after trimming.
+
+ // Trim illegal chars from start
+ while (sIx < eIx && IA[s.charAt(sIx) & 0xff] < 0) {
+ sIx++;
+ }
+
+ // Trim illegal chars from end
+ while (eIx > 0 && IA[s.charAt(eIx) & 0xff] < 0) {
+ eIx--;
+ }
+
+ // get the padding count (=) (0, 1 or 2)
+ int pad = s.charAt(eIx) == fillChar ? (s.charAt(eIx - 1) == fillChar ? 2 : 1) : 0; // Count '=' at end.
+ int cCnt = eIx - sIx + 1; // Content count including possible separators
+ int sepCnt = sLen > 76 ? (s.charAt(76) == '\r' ? cCnt / 78 : 0) << 1 : 0;
+
+ int len = ((cCnt - sepCnt) * 6 >> 3) - pad; // The number of decoded bytes
+ byte[] dArr = new byte[len]; // Preallocate byte[] of exact length
+
+ // Decode all but the last 0 - 2 bytes.
+ int d = 0;
+ for (int cc = 0, eLen = (len / 3) * 3; d < eLen;) {
+ // Assemble three bytes into an int from four "valid" characters.
+ int i = IA[s.charAt(sIx++)] << 18 | IA[s.charAt(sIx++)] << 12 | IA[s.charAt(sIx++)] << 6 | IA[s
+ .charAt(sIx++)];
+
+ // Add the bytes
+ dArr[d++] = (byte) (i >> 16);
+ dArr[d++] = (byte) (i >> 8);
+ dArr[d++] = (byte) i;
+
+ // If line separator, jump over it.
+ if (sepCnt > 0 && ++cc == 19) {
+ sIx += 2;
+ cc = 0;
+ }
+ }
+
+ if (d < len) {
+ // Decode last 1-3 bytes (incl '=') into 1-3 bytes
+ int i = 0;
+ for (int j = 0; sIx <= eIx - pad; j++) {
+ i |= IA[s.charAt(sIx++)] << (18 - j * 6);
+ }
+
+ for (int r = 16; d < len; r -= 8) {
+ dArr[d++] = (byte) (i >> r);
+ }
+ }
+
+ return dArr;
+ }
+
+ // ****************************************************************************************
+ // * byte[] version
+ // ****************************************************************************************
+
+ /**
+ * Encodes a raw byte array into a BASE64 byte[] representation in accordance with RFC 2045.
+ *
+ * @param sArr The bytes to convert. If null or length 0 an empty array will be returned.
+ * @param lineSep Optional "\r\n" after 76 characters, unless end of file.
+ * Passing false (no line separators) is in breach of RFC 2045, which specifies a maximum of 76
+ * characters per line, but is a little faster.
+ * @return A BASE64 encoded array. Never null.
+ */
+ public final byte[] encodeToByte(byte[] sArr, boolean lineSep) {
+ // Check special case
+ int sLen = sArr != null ? sArr.length : 0;
+ if (sLen == 0) {
+ return new byte[0];
+ }
+
+ int eLen = (sLen / 3) * 3; // Length of even 24-bits.
+ int cCnt = ((sLen - 1) / 3 + 1) << 2; // Returned character count
+ int dLen = cCnt + (lineSep ? (cCnt - 1) / 76 << 1 : 0); // Length of returned array
+ byte[] dArr = new byte[dLen];
+
+ // Encode even 24-bits
+ for (int s = 0, d = 0, cc = 0; s < eLen;) {
+ // Copy next three bytes into lower 24 bits of int, paying attention to sign.
+ int i = (sArr[s++] & 0xff) << 16 | (sArr[s++] & 0xff) << 8 | (sArr[s++] & 0xff);
+
+ // Encode the int into four chars
+ dArr[d++] = (byte) CA[(i >>> 18) & 0x3f];
+ dArr[d++] = (byte) CA[(i >>> 12) & 0x3f];
+ dArr[d++] = (byte) CA[(i >>> 6) & 0x3f];
+ dArr[d++] = (byte) CA[i & 0x3f];
+
+ // Add optional line separator
+ if (lineSep && ++cc == 19 && d < dLen - 2) {
+ dArr[d++] = '\r';
+ dArr[d++] = '\n';
+ cc = 0;
+ }
+ }
+
+ // Pad and encode last bits if source isn't an even 24 bits.
+ int left = sLen - eLen; // 0 - 2.
+ if (left > 0) {
+ // Prepare the int
+ int i = ((sArr[eLen] & 0xff) << 10) | (left == 2 ? ((sArr[sLen - 1] & 0xff) << 2) : 0);
+
+ // Set last four chars
+ dArr[dLen - 4] = (byte) CA[i >> 12];
+ dArr[dLen - 3] = (byte) CA[(i >>> 6) & 0x3f];
+ dArr[dLen - 2] = left == 2 ? (byte) CA[i & 0x3f] : (byte) fillChar;
+ dArr[dLen - 1] = (byte) fillChar;
+ }
+ return dArr;
+ }
+
+ // ****************************************************************************************
+ // * String version
+ // ****************************************************************************************
+
+ /**
+ * Encodes a raw byte array into a BASE64 String representation in accordance with RFC 2045.
+ *
+ * @param sArr The bytes to convert. If null or length 0 an empty String will be returned.
+ * @param lineSep Optional "\r\n" after 76 characters, unless end of file.
+ * Passing false (no line separators) is in breach of RFC 2045, which specifies a maximum of 76
+ * characters per line, but is a little faster.
+ * @return A BASE64 encoded String. Never null.
+ */
+ public final String encodeToString(byte[] sArr, boolean lineSep) {
+ // Reuse char[] since we can't create a String incrementally anyway and StringBuffer/Builder would be slower.
+ return new String(encodeToChar(sArr, lineSep));
+ }
+
+ // ****************************************************************************************
+ // * char[] version
+ // ****************************************************************************************
+
+ /**
+ * Encodes a raw byte array into a BASE64 char[] representation in accordance with RFC 2045.
+ *
+ * @param sArr The bytes to convert. If null or length 0 an empty array will be returned.
+ * @param lineSep Optional "\r\n" after 76 characters, unless end of file.
+ * Passing false (no line separators) is in breach of RFC 2045, which specifies a maximum of 76
+ * characters per line, but is a little faster.
+ * @return A BASE64 encoded array. Never null.
+ */
+ public final char[] encodeToChar(byte[] sArr, boolean lineSep) {
+ // Check special case
+ int sLen = sArr != null ? sArr.length : 0;
+ if (sLen == 0) {
+ return new char[0];
+ }
+
+ int eLen = (sLen / 3) * 3; // Length of even 24-bits.
+ int cCnt = ((sLen - 1) / 3 + 1) << 2; // Returned character count
+ int dLen = cCnt + (lineSep ? (cCnt - 1) / 76 << 1 : 0); // Length of returned array
+ char[] dArr = new char[dLen];
+
+ // Encode even 24-bits
+ for (int s = 0, d = 0, cc = 0; s < eLen;) {
+ // Copy next three bytes into lower 24 bits of int, paying attention to sign.
+ int i = (sArr[s++] & 0xff) << 16 | (sArr[s++] & 0xff) << 8 | (sArr[s++] & 0xff);
+
+ // Encode the int into four chars
+ dArr[d++] = CA[(i >>> 18) & 0x3f];
+ dArr[d++] = CA[(i >>> 12) & 0x3f];
+ dArr[d++] = CA[(i >>> 6) & 0x3f];
+ dArr[d++] = CA[i & 0x3f];
+
+ // Add optional line separator
+ if (lineSep && ++cc == 19 && d < dLen - 2) {
+ dArr[d++] = '\r';
+ dArr[d++] = '\n';
+ cc = 0;
+ }
+ }
+
+ // Pad and encode last bits if source isn't even 24 bits.
+ int left = sLen - eLen; // 0 - 2.
+ if (left > 0) {
+ // Prepare the int
+ int i = ((sArr[eLen] & 0xff) << 10) | (left == 2 ? ((sArr[sLen - 1] & 0xff) << 2) : 0);
+
+ // Set last four chars
+ dArr[dLen - 4] = CA[i >> 12];
+ dArr[dLen - 3] = CA[(i >>> 6) & 0x3f];
+ dArr[dLen - 2] = left == 2 ? CA[i & 0x3f] : fillChar;
+ dArr[dLen - 1] = fillChar;
+ }
+ return dArr;
+ }
+
+ public char[] getAlphabet() {
+ return CA;
+ }
+}
\ No newline at end of file
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/CharPredicate.scala b/akka-parsing/src/main/scala/akka/parboiled2/CharPredicate.scala
new file mode 100644
index 0000000000..3bc5971645
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/CharPredicate.scala
@@ -0,0 +1,311 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import scala.annotation.tailrec
+import scala.collection.immutable.NumericRange
+
+sealed abstract class CharPredicate extends (Char ⇒ Boolean) {
+ import CharPredicate._
+
+ /**
+ * Determines whether this CharPredicate is an instance of the high-performance,
+ * constant-time `CharPredicate.MaskBased` implementation.
+ */
+ def isMaskBased: Boolean = this.isInstanceOf[MaskBased]
+
+ def asMaskBased: MaskBased =
+ this match {
+ case x: MaskBased ⇒ x
+ case _ ⇒ sys.error("CharPredicate is not MaskBased")
+ }
+
+ def ++(that: CharPredicate): CharPredicate
+ def ++(chars: Seq[Char]): CharPredicate
+ def --(that: CharPredicate): CharPredicate
+ def --(chars: Seq[Char]): CharPredicate
+
+ def ++(char: Char): CharPredicate = this ++ (char :: Nil)
+ def --(char: Char): CharPredicate = this -- (char :: Nil)
+ def ++(chars: String): CharPredicate = this ++ chars.toCharArray
+ def --(chars: String): CharPredicate = this -- chars.toCharArray
+
+ def intersect(that: CharPredicate): CharPredicate
+
+ def negated: CharPredicate = this match {
+ case Empty ⇒ All
+ case All ⇒ Empty
+ case x ⇒ from(c ⇒ !x(c))
+ }
+
+ def matchesAny(string: String): Boolean = {
+ @tailrec def rec(ix: Int): Boolean =
+ if (ix == string.length) false else if (this(string charAt ix)) true else rec(ix + 1)
+ rec(0)
+ }
+
+ def matchesAll(string: String): Boolean = {
+ @tailrec def rec(ix: Int): Boolean =
+ if (ix == string.length) true else if (!this(string charAt ix)) false else rec(ix + 1)
+ rec(0)
+ }
+
+ def indexOfFirstMatch(string: String): Int = {
+ @tailrec def rec(ix: Int): Int =
+ if (ix == string.length) -1 else if (this(string charAt ix)) ix else rec(ix + 1)
+ rec(0)
+ }
+
+ def indexOfFirstMismatch(string: String): Int = {
+ @tailrec def rec(ix: Int): Int =
+ if (ix == string.length) -1 else if (this(string charAt ix)) rec(ix + 1) else ix
+ rec(0)
+ }
+
+ def firstMatch(string: String): Option[Char] =
+ indexOfFirstMatch(string) match {
+ case -1 ⇒ None
+ case ix ⇒ Some(string charAt ix)
+ }
+
+ def firstMismatch(string: String): Option[Char] =
+ indexOfFirstMismatch(string) match {
+ case -1 ⇒ None
+ case ix ⇒ Some(string charAt ix)
+ }
+
+ protected def or(that: Char ⇒ Boolean): CharPredicate =
+ from(if (this == Empty) that else c ⇒ this(c) || that(c))
+ protected def and(that: Char ⇒ Boolean): CharPredicate =
+ if (this == Empty) Empty else from(c ⇒ this(c) && that(c))
+ protected def andNot(that: Char ⇒ Boolean): CharPredicate =
+ from(if (this == Empty) c ⇒ !that(c) else c ⇒ this(c) && !that(c))
+}
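+
+/*
+ * Illustrative usage sketch (not part of the upstream parboiled2 sources): building and
+ * combining predicates. The value names are made up for demonstration purposes only.
+ *
+ * {{{
+ * val hexDigit = CharPredicate.HexDigit                     // mask-based, constant-time lookup
+ * val tokenChar = CharPredicate.AlphaNum ++ "-._~"          // add individual characters
+ * val nonDigit = CharPredicate.Visible -- CharPredicate.Digit
+ *
+ * hexDigit('C')               // true
+ * tokenChar.matchesAll("a-b") // true
+ * nonDigit.firstMatch("12x")  // Some('x')
+ * }}}
+ */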
+
+object CharPredicate {
+ val Empty: CharPredicate = MaskBased(0L, 0L)
+ val All: CharPredicate = from(_ ⇒ true)
+ val LowerAlpha = CharPredicate('a' to 'z')
+ val UpperAlpha = CharPredicate('A' to 'Z')
+ val Alpha = LowerAlpha ++ UpperAlpha
+ val Digit = CharPredicate('0' to '9')
+ val Digit19 = CharPredicate('1' to '9')
+ val AlphaNum = Alpha ++ Digit
+ val LowerHexLetter = CharPredicate('a' to 'f')
+ val UpperHexLetter = CharPredicate('A' to 'F')
+ val HexLetter = LowerHexLetter ++ UpperHexLetter
+ val HexDigit = Digit ++ HexLetter
+ val Visible = CharPredicate('\u0021' to '\u007e')
+ val Printable = Visible ++ ' '
+
+ def from(predicate: Char ⇒ Boolean): CharPredicate =
+ predicate match {
+ case x: CharPredicate ⇒ x
+ case x ⇒ General(x)
+ }
+
+ def apply(magnets: ApplyMagnet*): CharPredicate = (Empty /: magnets) { (a, m) ⇒ a ++ m.predicate }
+
+ class ApplyMagnet(val predicate: CharPredicate)
+ object ApplyMagnet {
+ implicit def fromPredicate(predicate: Char ⇒ Boolean): ApplyMagnet = new ApplyMagnet(from(predicate))
+ implicit def fromChar(c: Char): ApplyMagnet = fromChars(c :: Nil)
+ implicit def fromCharArray(array: Array[Char]): ApplyMagnet = fromChars(array)
+ implicit def fromString(chars: String): ApplyMagnet = fromChars(chars)
+ implicit def fromChars(chars: Seq[Char]): ApplyMagnet =
+ chars match {
+ case _ if chars.size < 128 & !chars.exists(unmaskable) ⇒
+ @tailrec def rec(ix: Int, result: CharPredicate): CharPredicate =
+ if (ix == chars.length) result else rec(ix + 1, result ++ chars(ix))
+ new ApplyMagnet(rec(0, Empty))
+ case r: NumericRange[Char] ⇒ new ApplyMagnet(new RangeBased(r))
+ case _ ⇒ new ApplyMagnet(new ArrayBased(chars.toArray))
+ }
+ }
+
+ ///////////////////////// PRIVATE ////////////////////////////
+
+ private def unmaskable(c: Char) = c >= 128
+
+ // efficient handling of 7bit-ASCII chars
+ case class MaskBased private[CharPredicate] (lowMask: Long, highMask: Long) extends CharPredicate {
+ def apply(c: Char): Boolean = {
+ val mask = if (c < 64) lowMask else highMask
+ ((1L << c) & ((c - 128) >> 31) & mask) != 0L // branchless for `(c < 128) && (mask & (1L << c) != 0)`
+ }
+
+ def ++(that: CharPredicate): CharPredicate = that match {
+ case Empty ⇒ this
+ case _ if this == Empty ⇒ that
+ case MaskBased(low, high) ⇒ MaskBased(lowMask | low, highMask | high)
+ case _ ⇒ this or that
+ }
+
+ def ++(chars: Seq[Char]): CharPredicate = chars.foldLeft(this: CharPredicate) {
+ case (_: MaskBased, c) if unmaskable(c) ⇒ new ArrayBased(chars.toArray) ++ new ArrayBased(toArray)
+ case (MaskBased(low, high), c) if c < 64 ⇒ MaskBased(low | 1L << c, high)
+ case (MaskBased(low, high), c) ⇒ MaskBased(low, high | 1L << c)
+ case (x, _) ⇒ x // once the fold acc is not a MaskBased we are done
+ }
+
+ def --(that: CharPredicate): CharPredicate = that match {
+ case Empty ⇒ this
+ case _ if this == Empty ⇒ this
+ case MaskBased(low, high) ⇒ MaskBased(lowMask & ~low, highMask & ~high)
+ case _ ⇒ this andNot that
+ }
+
+ def --(chars: Seq[Char]): CharPredicate =
+ if (this != Empty) {
+ chars.foldLeft(this: CharPredicate) {
+ case (_: MaskBased, c) if unmaskable(c) ⇒ this andNot new ArrayBased(chars.toArray)
+ case (MaskBased(low, high), c) if c < 64 ⇒ MaskBased(low & ~(1L << c), high)
+ case (MaskBased(low, high), c) ⇒ MaskBased(low, high & ~(1L << c))
+ case (x, _) ⇒ x // once the fold acc is not a MaskBased we are done
+ }
+ } else this
+
+ def intersect(that: CharPredicate) = that match {
+ case Empty ⇒ Empty
+ case _ if this == Empty ⇒ Empty
+ case MaskBased(low, high) ⇒ MaskBased(lowMask & low, highMask & high)
+ case _ ⇒ this and that
+ }
+
+ def size: Int = java.lang.Long.bitCount(lowMask) + java.lang.Long.bitCount(highMask)
+
+ def toArray: Array[Char] = {
+ val array = new Array[Char](size)
+ getChars(array, 0)
+ array
+ }
+
+ def getChars(array: Array[Char], startIx: Int): Unit = {
+ @tailrec def rec(mask: Long, offset: Int, bit: Int, ix: Int): Int =
+ if (bit < 64 && ix < array.length) {
+ if ((mask & (1L << bit)) > 0) {
+ array(ix) = (offset + bit).toChar
+ rec(mask, offset, bit + 1, ix + 1)
+ } else rec(mask, offset, bit + 1, ix)
+ } else ix
+ rec(highMask, 64, java.lang.Long.numberOfTrailingZeros(highMask),
+ rec(lowMask, 0, java.lang.Long.numberOfTrailingZeros(lowMask), startIx))
+ }
+
+ override def toString(): String = "CharPredicate.MaskBased(" + new String(toArray) + ')'
+ }
+
+ class RangeBased private[CharPredicate] (private val range: NumericRange[Char]) extends CharPredicate {
+ def apply(c: Char): Boolean = range contains c
+
+ def ++(that: CharPredicate): CharPredicate = that match {
+ case Empty ⇒ this
+ case _ ⇒ this or that
+ }
+
+ def ++(other: Seq[Char]): CharPredicate = if (other.nonEmpty) this ++ CharPredicate(other) else this
+
+ def --(that: CharPredicate): CharPredicate = that match {
+ case Empty ⇒ this
+ case _ ⇒ this andNot that
+ }
+
+ def --(other: Seq[Char]): CharPredicate = if (other.nonEmpty) this -- CharPredicate(other) else this
+
+ def intersect(that: CharPredicate): CharPredicate = that match {
+ case Empty ⇒ Empty
+ case _ ⇒ this and that
+ }
+
+ override def toString(): String = s"CharPredicate.RangeBased(start = ${range.start}, end = ${range.end}, " +
+ s"step = ${range.step.toInt}, inclusive = ${range.isInclusive})"
+ }
+
+ class ArrayBased private[CharPredicate] (private val chars: Array[Char]) extends CharPredicate {
+ import java.util.Arrays._
+ sort(chars)
+
+ // TODO: switch to faster binary search algorithm with an adaptive pivot, e.g. http://ochafik.com/blog/?p=106
+ def apply(c: Char): Boolean = binarySearch(chars, c) >= 0
+
+ def ++(that: CharPredicate): CharPredicate = that match {
+ case Empty ⇒ this
+ case x: ArrayBased ⇒ this ++ x.chars
+ case _ ⇒ this or that
+ }
+
+ def ++(other: Seq[Char]): CharPredicate =
+ if (other.nonEmpty) new ArrayBased((this -- other).chars ++ other.toArray[Char])
+ else this
+
+ def --(that: CharPredicate): CharPredicate = that match {
+ case Empty ⇒ this
+ case x: ArrayBased ⇒ this -- x.chars
+ case _ ⇒ this andNot that
+ }
+
+ def --(other: Seq[Char]): ArrayBased =
+ if (other.nonEmpty) {
+ val otherChars = other.toArray
+ new ArrayBased(chars.filter(binarySearch(otherChars, _) < 0))
+ } else this
+
+ def intersect(that: CharPredicate): CharPredicate = that match {
+ case Empty ⇒ Empty
+ case x: ArrayBased ⇒ new ArrayBased(chars.intersect(x.chars))
+ case _ ⇒ this and that
+ }
+
+ override def toString(): String = "CharPredicate.ArrayBased(" + new String(chars) + ')'
+ }
+
+ case class General private[CharPredicate] (predicate: Char ⇒ Boolean) extends CharPredicate {
+ def apply(c: Char) = predicate(c)
+
+ def ++(that: CharPredicate): CharPredicate = that match {
+ case Empty ⇒ this
+ case General(thatPredicate) ⇒ from(c ⇒ predicate(c) || thatPredicate(c))
+ case _ ⇒ from(c ⇒ predicate(c) || that(c))
+ }
+
+ def ++(chars: Seq[Char]): CharPredicate =
+ if (chars.nonEmpty) {
+ val abp = new ArrayBased(chars.toArray)
+ from(c ⇒ predicate(c) || abp(c))
+ } else this
+
+ def --(that: CharPredicate): CharPredicate = that match {
+ case Empty ⇒ this
+ case General(thatPredicate) ⇒ from(c ⇒ predicate(c) && !thatPredicate(c))
+ case _ ⇒ from(c ⇒ predicate(c) && !that(c))
+ }
+
+ def --(chars: Seq[Char]): CharPredicate =
+ if (chars.nonEmpty) {
+ val abp = new ArrayBased(chars.toArray)
+ from(c ⇒ predicate(c) && !abp(c))
+ } else this
+
+ def intersect(that: CharPredicate) = that match {
+ case Empty ⇒ Empty
+ case General(thatPredicate) ⇒ from(c ⇒ predicate(c) && thatPredicate(c))
+ case _ ⇒ this and that
+ }
+
+ override def toString(): String = "CharPredicate.General@" + System.identityHashCode(this)
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/CharUtils.scala b/akka-parsing/src/main/scala/akka/parboiled2/CharUtils.scala
new file mode 100644
index 0000000000..fca40c5a45
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/CharUtils.scala
@@ -0,0 +1,194 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import java.lang.{ StringBuilder ⇒ JStringBuilder }
+import scala.annotation.tailrec
+
+object CharUtils {
+ /**
+ * Returns the int value of a given hex digit char.
+ * Note: this implementation is very fast (since it's branchless) and therefore
+ * does not perform ANY range checks!
+ */
+ def hexValue(c: Char): Int = (c & 0x1f) + ((c >> 6) * 0x19) - 0x10
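+ // Worked examples (illustrative): hexValue('7') == 7, hexValue('a') == hexValue('A') == 10;
+ // for non-hex-digit input the result is meaningless since there are no range checks.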
+
+ /**
+ * Computes the number of hex digits required to represent the given integer.
+ * Leading zeros are not counted.
+ */
+ def numberOfHexDigits(l: Long): Int = (math.max(63 - java.lang.Long.numberOfLeadingZeros(l), 0) >> 2) + 1
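+ // Worked examples (illustrative): numberOfHexDigits(0L) == 1, numberOfHexDigits(255L) == 2,
+ // numberOfHexDigits(256L) == 3.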
+
+ /**
+ * Returns the lower-case hex digit corresponding to the last 4 bits of the given Long.
+ * (fast branchless implementation)
+ */
+ def lowerHexDigit(long: Long): Char = lowerHexDigit_internal((long & 0x0FL).toInt)
+
+ /**
+ * Returns the lower-case hex digit corresponding to the last 4 bits of the given Int.
+ * (fast branchless implementation)
+ */
+ def lowerHexDigit(int: Int): Char = lowerHexDigit_internal(int & 0x0F)
+
+ private def lowerHexDigit_internal(i: Int) = (48 + i + (39 & ((9 - i) >> 31))).toChar
+
+ /**
+ * Returns the upper-case hex digit corresponding to the last 4 bits of the given Long.
+ * (fast branchless implementation)
+ */
+ def upperHexDigit(long: Long): Char = upperHexDigit_internal((long & 0x0FL).toInt)
+
+ /**
+ * Returns the upper-case hex digit corresponding to the last 4 bits of the given Int.
+ * (fast branchless implementation)
+ */
+ def upperHexDigit(int: Int): Char = upperHexDigit_internal(int & 0x0F)
+
+ private def upperHexDigit_internal(i: Int) = (48 + i + (7 & ((9 - i) >> 31))).toChar
+
+ /**
+ * Efficiently converts the given long into an upper-case hex string.
+ */
+ def upperHexString(long: Long): String =
+ appendUpperHexString(new JStringBuilder(numberOfHexDigits(long)), long).toString
+
+ /**
+ * Appends the upper-case hex representation of the given long to the given StringBuilder.
+ */
+ def appendUpperHexString(sb: JStringBuilder, long: Long): JStringBuilder =
+ if (long != 0) {
+ @tailrec def putChar(shift: Int): JStringBuilder = {
+ sb.append(upperHexDigit(long >>> shift))
+ if (shift > 0) putChar(shift - 4) else sb
+ }
+ putChar((63 - java.lang.Long.numberOfLeadingZeros(long)) & 0xFC)
+ } else sb.append('0')
+
+ /**
+ * Efficiently converts the given long into a lower-case hex string.
+ */
+ def lowerHexString(long: Long): String =
+ appendLowerHexString(new JStringBuilder(numberOfHexDigits(long)), long).toString
+
+ /**
+ * Appends the lower-case hex representation of the given long to the given StringBuilder.
+ */
+ def appendLowerHexString(sb: JStringBuilder, long: Long): JStringBuilder =
+ if (long != 0) {
+ @tailrec def putChar(shift: Int): JStringBuilder = {
+ sb.append(lowerHexDigit(long >>> shift))
+ if (shift > 0) putChar(shift - 4) else sb
+ }
+ putChar((63 - java.lang.Long.numberOfLeadingZeros(long)) & 0xFC)
+ } else sb.append('0')
+
+ /**
+ * Returns a String representing the given long in signed decimal representation.
+ */
+ def signedDecimalString(long: Long): String = new String(signedDecimalChars(long))
+
+ /**
+ * Computes the number of characters required for the signed decimal representation of the given integer.
+ */
+ def numberOfDecimalDigits(long: Long): Int =
+ if (long != Long.MinValue) _numberOfDecimalDigits(long) else 20
+
+ private def _numberOfDecimalDigits(long: Long): Int = {
+ def mul10(l: Long) = (l << 3) + (l << 1)
+ @tailrec def len(test: Long, l: Long, result: Int): Int =
+ if (test > l || test < 0) result else len(mul10(test), l, result + 1)
+ if (long < 0) len(10, -long, 2) else len(10, long, 1)
+ }
+
+ val LongMinValueChars = "-9223372036854775808".toCharArray
+
+ /**
+ * Returns a char array representing the given long in signed decimal representation.
+ */
+ def signedDecimalChars(long: Long): Array[Char] =
+ if (long != Long.MinValue) {
+ val len = _numberOfDecimalDigits(long)
+ val buf = new Array[Char](len)
+ getSignedDecimalChars(long, len, buf)
+ buf
+ } else LongMinValueChars
+
+ /**
+ * Converts the given Long value into its signed decimal character representation.
+ * The characters are placed into the given buffer *before* the given `endIndex` (exclusive).
+ * CAUTION: This algorithm cannot deal with `Long.MinValue`, you'll need to special-case this value!
+ */
+ def getSignedDecimalChars(long: Long, endIndex: Int, buf: Array[Char]): Unit = {
+ def div10(i: Int) = {
+ var q = (i << 3) + (i << 2)
+ q += (q << 12) + (q << 8) + (q << 4) + i
+ q >>>= 19
+ q // 52429 * l / 524288 = l * 0.10000038146972656
+ }
+ def mul10(i: Int) = (i << 3) + (i << 1)
+ def mul100(l: Long) = (l << 6) + (l << 5) + (l << 2)
+
+ phase1(math.abs(long), endIndex)
+
+ // for large numbers we bite the bullet of performing one division every two digits
+ @tailrec def phase1(l: Long, ix: Int): Unit =
+ if (l > 65535L) {
+ val q = l / 100
+ val r = (l - mul100(q)).toInt
+ val rq = div10(r)
+ buf(ix - 2) = ('0' + rq).toChar
+ buf(ix - 1) = ('0' + r - mul10(rq)).toChar
+ phase1(q, ix - 2)
+ } else phase2(l.toInt, ix)
+
+ // for small numbers we can use the "fast-path"
+ @tailrec def phase2(i: Int, ix: Int): Unit = {
+ val q = div10(i)
+ val r = i - mul10(q)
+ buf(ix - 1) = ('0' + r).toChar
+ if (q != 0) phase2(q, ix - 1)
+ else if (long < 0) buf(ix - 2) = '-'
+ }
+ }
+
+ /**
+ * Efficiently lower-cases the given character.
+ * Note: only works for 7-bit ASCII letters.
+ */
+ def toLowerCase(c: Char): Char = if (CharPredicate.UpperAlpha(c)) (c + 0x20).toChar else c
+
+ /**
+ * Efficiently upper-cases the given character.
+ * Note: only works for 7-bit ASCII letters.
+ */
+ def toUpperCase(c: Char): Char = if (CharPredicate.LowerAlpha(c)) (c - 0x20).toChar else c
+
+ def escape(c: Char): String = c match {
+ case '\t' ⇒ "\\t"
+ case '\r' ⇒ "\\r"
+ case '\n' ⇒ "\\n"
+ case EOI ⇒ "EOI"
+ case x if Character.isISOControl(x) ⇒ "\\u%04x" format c.toInt
+ case x ⇒ x.toString
+ }
+
+ val escapedChars = CharPredicate("\t\r\n", EOI, Character.isISOControl _)
+
+ def escape(s: String): String =
+ if (escapedChars.matchesAny(s)) s.flatMap(escape(_: Char)) else s
+}
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/DynamicRuleDispatch.scala b/akka-parsing/src/main/scala/akka/parboiled2/DynamicRuleDispatch.scala
new file mode 100644
index 0000000000..6b761d6b4a
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/DynamicRuleDispatch.scala
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import scala.language.experimental.macros
+
+import scala.collection.immutable
+import scala.reflect.macros.Context
+import akka.shapeless.HList
+
+/**
+ * An application needs to implement this interface to receive the result
+ * of a dynamic parsing run.
+ * Oftentimes this interface is implemented directly by the Parser class itself
+ * (even though this is not a requirement).
+ */
+trait DynamicRuleHandler[P <: Parser, L <: HList] extends Parser.DeliveryScheme[L] {
+ def parser: P
+ def ruleNotFound(ruleName: String): Result
+}
+
+/**
+ * Runs one of the rules of a parser instance of type `P`, given the rule's name.
+ * The rule must have type `RuleN[L]`.
+ */
+trait DynamicRuleDispatch[P <: Parser, L <: HList] {
+ def apply(handler: DynamicRuleHandler[P, L], ruleName: String): handler.Result
+}
+
+object DynamicRuleDispatch {
+
+ /**
+ * Implements efficient runtime dispatch to a predefined set of parser rules.
+ * Given a number of rule names this macro-supported method creates a `DynamicRuleDispatch` instance along with
+ * a sequence of the given rule names.
+ * Note that there is no reflection involved and compilation will fail if one of the given rule names
+ * does not constitute a method of parser type `P` or has a type different from `RuleN[L]`.
+ */
+ def apply[P <: Parser, L <: HList](ruleNames: String*): (DynamicRuleDispatch[P, L], immutable.Seq[String]) = macro __create[P, L]
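+
+ /*
+ * Illustrative usage sketch (not part of the upstream parboiled2 sources); `MyParser`, the
+ * rule names and the `handler` value are made up for demonstration purposes only:
+ *
+ * {{{
+ * val (dispatch, ruleNames) = DynamicRuleDispatch[MyParser, HNil]("intExpr", "stringExpr")
+ *
+ * // given some handler: DynamicRuleHandler[MyParser, HNil]
+ * dispatch(handler, "intExpr") // runs MyParser.intExpr and delivers the result via the handler
+ * }}}
+ */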
+
+ ///////////////////// INTERNAL ////////////////////////
+
+ def __create[P <: Parser, L <: HList](c: Context)(ruleNames: c.Expr[String]*)(implicit P: c.WeakTypeTag[P], L: c.WeakTypeTag[L]): c.Expr[(DynamicRuleDispatch[P, L], immutable.Seq[String])] = {
+ import c.universe._
+ val names: Array[String] = ruleNames.map {
+ _.tree match {
+ case Literal(Constant(s: String)) ⇒ s
+ case x ⇒ c.abort(x.pos, s"Invalid `String` argument `$x`, only `String` literals are supported!")
+ }
+ }(collection.breakOut)
+ java.util.Arrays.sort(names.asInstanceOf[Array[Object]])
+
+ def rec(start: Int, end: Int): Tree =
+ if (start <= end) {
+ val mid = (start + end) >>> 1
+ val name = names(mid)
+ q"""val c = $name compare ruleName
+ if (c < 0) ${rec(mid + 1, end)}
+ else if (c > 0) ${rec(start, mid - 1)}
+ else {
+ val p = handler.parser
+ p.__run[$L](p.${newTermName(name).encodedName.toTermName})(handler)
+ }"""
+ } else q"handler.ruleNotFound(ruleName)"
+
+ c.Expr[(DynamicRuleDispatch[P, L], immutable.Seq[String])] {
+ q"""val drd =
+ new akka.parboiled2.DynamicRuleDispatch[$P, $L] {
+ def apply(handler: akka.parboiled2.DynamicRuleHandler[$P, $L], ruleName: String): handler.Result =
+ ${rec(0, names.length - 1)}
+ }
+ (drd, scala.collection.immutable.Seq(..$ruleNames))"""
+ }
+ }
+}
\ No newline at end of file
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/ParseError.scala b/akka-parsing/src/main/scala/akka/parboiled2/ParseError.scala
new file mode 100644
index 0000000000..8b21c4a2d1
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/ParseError.scala
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import CharUtils.escape
+
+case class ParseError(position: Position, traces: Seq[RuleTrace]) extends RuntimeException {
+ def formatExpectedAsString: String = {
+ val expected = formatExpectedAsSeq
+ expected.size match {
+ case 0 ⇒ "??"
+ case 1 ⇒ expected.head
+ case _ ⇒ expected.init.mkString(", ") + " or " + expected.last
+ }
+ }
+ def formatExpectedAsSeq: Seq[String] =
+ traces.map { trace ⇒
+ if (trace.frames.nonEmpty) {
+ val exp = trace.frames.last.format
+ val nonEmptyExp = if (exp.isEmpty) "?" else exp
+ if (trace.isNegated) "!" + nonEmptyExp else nonEmptyExp
+ } else "???"
+ }.distinct
+
+ def formatTraces: String =
+ traces.map(_.format).mkString(traces.size + " rule" + (if (traces.size != 1) "s" else "") +
+ " mismatched at error location:\n ", "\n ", "\n")
+}
+
+case class Position(index: Int, line: Int, column: Int)
+
+// outermost (i.e. highest-level) rule first
+case class RuleTrace(frames: Seq[RuleFrame]) {
+ def format: String =
+ frames.size match {
+ case 0 ⇒ ""
+ case 1 ⇒ frames.head.format
+ case _ ⇒
+ // we don't want to show intermediate Sequence and RuleCall frames in the trace
+ def show(frame: RuleFrame) = !(frame.isInstanceOf[RuleFrame.Sequence] || frame.isInstanceOf[RuleFrame.RuleCall])
+ frames.init.filter(show).map(_.format).mkString("", " / ", " / " + frames.last.format)
+ }
+
+ def isNegated: Boolean = (frames.count(_.anon == RuleFrame.NotPredicate) & 0x01) > 0
+}
+
+sealed abstract class RuleFrame {
+ import RuleFrame._
+ def anon: RuleFrame.Anonymous
+
+ def format: String =
+ this match {
+ case Named(name, _) ⇒ name
+ case Sequence(_) ⇒ "~"
+ case FirstOf(_) ⇒ "|"
+ case CharMatch(c) ⇒ "'" + escape(c) + '\''
+ case StringMatch(s) ⇒ '"' + escape(s) + '"'
+ case MapMatch(m) ⇒ m.toString()
+ case IgnoreCaseChar(c) ⇒ "'" + escape(c) + '\''
+ case IgnoreCaseString(s) ⇒ '"' + escape(s) + '"'
+ case CharPredicateMatch(_, name) ⇒ if (name.nonEmpty) name else ""
+ case RuleCall(callee) ⇒ '(' + callee + ')'
+ case AnyOf(s) ⇒ '[' + escape(s) + ']'
+ case NoneOf(s) ⇒ s"[^${escape(s)}]"
+ case Times(_, _) ⇒ "times"
+ case CharRange(from, to) ⇒ s"'${escape(from)}'-'${escape(to)}'"
+ case AndPredicate ⇒ "&"
+ case NotPredicate ⇒ "!"
+ case SemanticPredicate ⇒ "test"
+ case ANY ⇒ "ANY"
+ case _ ⇒ {
+ val s = toString
+ s.updated(0, s.charAt(0).toLower)
+ }
+ }
+}
+
+object RuleFrame {
+ def apply(frame: Anonymous, name: String): RuleFrame =
+ if (name.isEmpty) frame else Named(name, frame)
+
+ case class Named(name: String, anon: Anonymous) extends RuleFrame
+
+ sealed abstract class Anonymous extends RuleFrame {
+ def anon: Anonymous = this
+ }
+ case class Sequence(subs: Int) extends Anonymous
+ case class FirstOf(subs: Int) extends Anonymous
+ case class CharMatch(char: Char) extends Anonymous
+ case class StringMatch(string: String) extends Anonymous
+ case class MapMatch(map: Map[String, Any]) extends Anonymous
+ case class IgnoreCaseChar(char: Char) extends Anonymous
+ case class IgnoreCaseString(string: String) extends Anonymous
+ case class CharPredicateMatch(predicate: CharPredicate, name: String) extends Anonymous
+ case class AnyOf(string: String) extends Anonymous
+ case class NoneOf(string: String) extends Anonymous
+ case class Times(min: Int, max: Int) extends Anonymous
+ case class RuleCall(callee: String) extends Anonymous
+ case class CharRange(from: Char, to: Char) extends Anonymous
+ case object ANY extends Anonymous
+ case object Optional extends Anonymous
+ case object ZeroOrMore extends Anonymous
+ case object OneOrMore extends Anonymous
+ case object AndPredicate extends Anonymous
+ case object NotPredicate extends Anonymous
+ case object SemanticPredicate extends Anonymous
+ case object Capture extends Anonymous
+ case object Run extends Anonymous
+ case object Push extends Anonymous
+ case object Drop extends Anonymous
+ case object Action extends Anonymous
+ case object RunSubParser extends Anonymous
+}
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/Parser.scala b/akka-parsing/src/main/scala/akka/parboiled2/Parser.scala
new file mode 100644
index 0000000000..d0f8901071
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/Parser.scala
@@ -0,0 +1,489 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import scala.annotation.tailrec
+import scala.collection.immutable.VectorBuilder
+import scala.util.{ Failure, Success, Try }
+import scala.util.control.{ NonFatal, NoStackTrace }
+import akka.shapeless._
+import akka.parboiled2.support._
+
+abstract class Parser(initialValueStackSize: Int = 16,
+ maxValueStackSize: Int = 1024) extends RuleDSL {
+ import Parser._
+
+ require(maxValueStackSize <= 65536, "`maxValueStackSize` > 2^16 is not supported") // due to current snapshot design
+
+ /**
+ * The input this parser instance is running against.
+ */
+ def input: ParserInput
+
+ /**
+ * Converts a compile-time only rule definition into the corresponding rule method implementation.
+ */
+ def rule[I <: HList, O <: HList](r: Rule[I, O]): Rule[I, O] = macro ParserMacros.ruleImpl[I, O]
+
+ /**
+ * The index of the next (yet unmatched) input character.
+ * Might be equal to `input.length`!
+ */
+ def cursor: Int = _cursor
+
+ /**
+ * The next (yet unmatched) input character, i.e. the one at the `cursor` index.
+ * Identical to `if (cursor < input.length) input.charAt(cursor) else EOI` but more efficient.
+ */
+ def cursorChar: Char = _cursorChar
+
+ /**
+ * Returns the last character that was matched, i.e. the one at index cursor - 1
+ * Note: for performance optimization this method does *not* do a range check,
+ * i.e. depending on the ParserInput implementation you might get an exception
+ * when calling this method before any character was matched by the parser.
+ */
+ def lastChar: Char = charAt(-1)
+
+ /**
+ * Returns the character at the input index with the given delta to the cursor.
+ * Note: for performance optimization this method does *not* do a range check,
+ * i.e. depending on the ParserInput implementation you might get an exception
+ * when calling this method before any character was matched by the parser.
+ */
+ def charAt(offset: Int): Char = input.charAt(cursor + offset)
+
+ /**
+ * Same as `charAt` but range-checked.
+ * Returns the input character at the index with the given offset from the cursor.
+ * If this index is out of range the method returns `EOI`.
+ */
+ def charAtRC(offset: Int): Char = {
+ val ix = cursor + offset
+ if (0 <= ix && ix < input.length) input.charAt(ix) else EOI
+ }
+
+ /**
+ * Allows "raw" (i.e. untyped) access to the `ValueStack`.
+ * In most cases you shouldn't need to access the value stack directly from your code.
+ * Use only if you know what you are doing!
+ */
+ def valueStack: ValueStack = _valueStack
+
+ /**
+ * Pretty prints the given `ParseError` instance in the context of the `ParserInput` of this parser.
+ */
+ def formatError(error: ParseError, showExpected: Boolean = true, showPosition: Boolean = true,
+ showLine: Boolean = true, showTraces: Boolean = false): String = {
+ val sb = new java.lang.StringBuilder(formatErrorProblem(error))
+ import error._
+ if (showExpected) sb.append(", expected ").append(formatExpectedAsString)
+ if (showPosition) sb.append(" (line ").append(position.line).append(", column ").append(position.column).append(')')
+ if (showLine) sb.append(':').append('\n').append(formatErrorLine(error))
+ if (showTraces) sb.append('\n').append('\n').append(formatTraces)
+ sb.toString
+ }
+
+ /**
+ * Pretty prints the problem of the given error, i.e. the unexpected input character at the error
+ * position or the fact that the input ended unexpectedly.
+ */
+ def formatErrorProblem(error: ParseError): String =
+ if (error.position.index < input.length) s"Invalid input '${CharUtils.escape(input charAt error.position.index)}'"
+ else "Unexpected end of input"
+
+ /**
+ * Pretty prints the input line in which the error occurred and underlines the error position in the line
+ * with a caret.
+ */
+ def formatErrorLine(error: ParseError): String =
+ (input getLine error.position.line) + '\n' + (" " * (error.position.column - 1) + '^')
+
+ ////////////////////// INTERNAL /////////////////////////
+
+ // the char at the current input index
+ private var _cursorChar: Char = _
+
+ // the index of the current input char
+ private var _cursor: Int = _
+
+ // the value stack instance we operate on
+ private var _valueStack: ValueStack = _
+
+ // the highest input index we have seen in the current run
+ // special value: -1 (not collecting errors)
+ private var maxCursor: Int = _
+
+ // the number of times we have already seen a character mismatch at the error index
+ private var mismatchesAtErrorCursor: Int = _
+
+ // the index of the RuleStack we are currently constructing
+ // for the ParseError to be (potentially) returned in the current parser run,
+ // special value: -1 (during the run to establish the error location (maxCursor))
+ private var currentErrorRuleStackIx: Int = _
+
+ def copyStateFrom(other: Parser, offset: Int): Unit = {
+ _cursorChar = other._cursorChar
+ _cursor = other._cursor - offset
+ _valueStack = other._valueStack
+ maxCursor = other.maxCursor - offset
+ mismatchesAtErrorCursor = other.mismatchesAtErrorCursor
+ currentErrorRuleStackIx = other.currentErrorRuleStackIx
+ }
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __collectingErrors = maxCursor >= 0
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __run[L <: HList](rule: ⇒ RuleN[L])(implicit scheme: Parser.DeliveryScheme[L]): scheme.Result = {
+ def runRule(errorRuleStackIx: Int = -1): Boolean = {
+ _cursor = -1
+ __advance()
+ valueStack.clear()
+ mismatchesAtErrorCursor = 0
+ currentErrorRuleStackIx = errorRuleStackIx
+ rule ne null
+ }
+
+ @tailrec
+ def errorPosition(ix: Int = 0, line: Int = 1, col: Int = 1): Position =
+ if (ix >= maxCursor) Position(maxCursor, line, col)
+ else if (ix >= input.length || input.charAt(ix) != '\n') errorPosition(ix + 1, line, col + 1)
+ else errorPosition(ix + 1, line + 1, 1)
+
+ @tailrec
+ def buildParseError(errorRuleIx: Int = 0, traces: VectorBuilder[RuleTrace] = new VectorBuilder): ParseError = {
+ val ruleFrames: List[RuleFrame] =
+ try {
+ runRule(errorRuleIx)
+ Nil // we managed to complete the run w/o exception, i.e. we have collected all frames
+ } catch {
+ case e: Parser.CollectingRuleStackException ⇒ e.ruleFrames
+ }
+ if (ruleFrames.isEmpty) ParseError(errorPosition(), traces.result())
+ else buildParseError(errorRuleIx + 1, traces += RuleTrace(ruleFrames.toVector))
+ }
+
+ _valueStack = new ValueStack(initialValueStackSize, maxValueStackSize)
+ try {
+ maxCursor = -1
+ if (runRule())
+ scheme.success(valueStack.toHList[L]())
+ else {
+ maxCursor = 0 // establish the error location with the next run
+ if (runRule()) sys.error("Parsing unexpectedly succeeded while trying to establish the error location")
+ // now maxCursor holds the error location, we can now build the parser error info
+ scheme.parseError(buildParseError())
+ }
+ } catch {
+ case NonFatal(e) ⇒ scheme.failure(e)
+ }
+ }
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __advance(): Boolean = {
+ var c = _cursor
+ val max = input.length
+ if (c < max) {
+ c += 1
+ _cursor = c
+ _cursorChar = if (c == max) EOI else input charAt c
+ }
+ true
+ }
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __updateMaxCursor(): Boolean = {
+ if (_cursor > maxCursor) maxCursor = _cursor
+ true
+ }
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __saveState: Mark = new Mark((_cursor.toLong << 32) + (_cursorChar.toLong << 16) + valueStack.size)
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __restoreState(mark: Mark): Unit = {
+ _cursor = (mark.value >>> 32).toInt
+ _cursorChar = ((mark.value >>> 16) & 0x000000000000FFFF).toChar
+ valueStack.size = (mark.value & 0x000000000000FFFF).toInt
+ }
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __enterNotPredicate: Int = {
+ val saved = maxCursor
+ maxCursor = -1 // disables maxCursor update as well as error rulestack collection
+ saved
+ }
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __exitNotPredicate(saved: Int): Unit = maxCursor = saved
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __registerMismatch(): Boolean = {
+ if (currentErrorRuleStackIx >= 0 && _cursor == maxCursor) {
+ if (mismatchesAtErrorCursor < currentErrorRuleStackIx) mismatchesAtErrorCursor += 1
+ else throw new Parser.CollectingRuleStackException
+ }
+ false
+ }
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __push(value: Any): Boolean = {
+ value match {
+ case () ⇒
+ case x: HList ⇒ valueStack.pushAll(x)
+ case x ⇒ valueStack.push(x)
+ }
+ true
+ }
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ @tailrec final def __matchString(string: String, ix: Int = 0): Boolean =
+ if (ix < string.length)
+ if (cursorChar == string.charAt(ix)) {
+ __advance()
+ __matchString(string, ix + 1)
+ } else false
+ else true
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ @tailrec final def __matchStringWrapped(string: String, ruleName: String, ix: Int = 0): Boolean =
+ if (ix < string.length)
+ if (cursorChar == string.charAt(ix)) {
+ __advance()
+ __updateMaxCursor()
+ __matchStringWrapped(string, ruleName, ix + 1)
+ } else {
+ try __registerMismatch()
+ catch {
+ case e: Parser.CollectingRuleStackException ⇒
+ e.save(RuleFrame(RuleFrame.StringMatch(string), ruleName), RuleFrame.CharMatch(string charAt ix))
+ }
+ }
+ else true
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ @tailrec final def __matchIgnoreCaseString(string: String, ix: Int = 0): Boolean =
+ if (ix < string.length)
+ if (Character.toLowerCase(cursorChar) == string.charAt(ix)) {
+ __advance()
+ __matchIgnoreCaseString(string, ix + 1)
+ } else false
+ else true
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ @tailrec final def __matchIgnoreCaseStringWrapped(string: String, ruleName: String, ix: Int = 0): Boolean =
+ if (ix < string.length)
+ if (Character.toLowerCase(cursorChar) == string.charAt(ix)) {
+ __advance()
+ __updateMaxCursor()
+ __matchIgnoreCaseStringWrapped(string, ruleName, ix + 1)
+ } else {
+ try __registerMismatch()
+ catch {
+ case e: Parser.CollectingRuleStackException ⇒
+ e.save(RuleFrame(RuleFrame.IgnoreCaseString(string), ruleName), RuleFrame.IgnoreCaseChar(string charAt ix))
+ }
+ }
+ else true
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ @tailrec final def __matchAnyOf(string: String, ix: Int = 0): Boolean =
+ if (ix < string.length)
+ if (string.charAt(ix) == cursorChar) __advance()
+ else __matchAnyOf(string, ix + 1)
+ else false
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ @tailrec final def __matchNoneOf(string: String, ix: Int = 0): Boolean =
+ if (ix < string.length)
+ cursorChar != EOI && string.charAt(ix) != cursorChar && __matchNoneOf(string, ix + 1)
+ else __advance()
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __matchMap(m: Map[String, Any]): Boolean = {
+ val keys = m.keysIterator
+ while (keys.hasNext) {
+ val mark = __saveState
+ val key = keys.next()
+ if (__matchString(key)) {
+ __push(m(key))
+ return true
+ } else __restoreState(mark)
+ }
+ false
+ }
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ def __matchMapWrapped(m: Map[String, Any], ruleName: String): Boolean = {
+ val keys = m.keysIterator
+ try {
+ while (keys.hasNext) {
+ val mark = __saveState
+ val key = keys.next()
+ if (__matchStringWrapped(key, "")) {
+ __push(m(key))
+ return true
+ } else __restoreState(mark)
+ }
+ false
+ } catch {
+ case e: Parser.CollectingRuleStackException ⇒ e.save(RuleFrame(RuleFrame.MapMatch(m), ruleName))
+ }
+ }
+
+ protected class __SubParserInput extends ParserInput {
+ val offset = cursor // the number of chars by which the sub-parser's input is offset from the outer input's start
+ def getLine(line: Int): String = ??? // TODO
+ def sliceCharArray(start: Int, end: Int): Array[Char] = input.sliceCharArray(start + offset, end + offset)
+ def sliceString(start: Int, end: Int): String = input.sliceString(start + offset, end + offset)
+ def length: Int = input.length - offset
+ def charAt(ix: Int): Char = input.charAt(offset + ix)
+ }
+}
+
+object Parser {
+
+ trait DeliveryScheme[L <: HList] {
+ type Result
+ def success(result: L): Result
+ def parseError(error: ParseError): Result
+ def failure(error: Throwable): Result
+ }
+
+ object DeliveryScheme extends AlternativeDeliverySchemes {
+ implicit def Try[L <: HList, Out](implicit unpack: Unpack.Aux[L, Out]) =
+ new DeliveryScheme[L] {
+ type Result = Try[Out]
+ def success(result: L) = Success(unpack(result))
+ def parseError(error: ParseError) = Failure(error)
+ def failure(error: Throwable) = Failure(error)
+ }
+ }
+ sealed abstract class AlternativeDeliverySchemes {
+ implicit def Either[L <: HList, Out](implicit unpack: Unpack.Aux[L, Out]) =
+ new DeliveryScheme[L] {
+ type Result = Either[ParseError, Out]
+ def success(result: L) = Right(unpack(result))
+ def parseError(error: ParseError) = Left(error)
+ def failure(error: Throwable) = throw error
+ }
+ implicit def Throw[L <: HList, Out](implicit unpack: Unpack.Aux[L, Out]) =
+ new DeliveryScheme[L] {
+ type Result = Out
+ def success(result: L) = unpack(result)
+ def parseError(error: ParseError) = throw error
+ def failure(error: Throwable) = throw error
+ }
+ }
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ class Mark private[Parser] (val value: Long) extends AnyVal
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ class CollectingRuleStackException extends RuntimeException with NoStackTrace {
+ private[this] var frames = List.empty[RuleFrame]
+ def save(newFrames: RuleFrame*): Nothing = {
+ frames = newFrames.foldRight(frames)(_ :: _)
+ throw this
+ }
+ def ruleFrames: List[RuleFrame] = frames
+ }
+}
+
+object ParserMacros {
+ import scala.reflect.macros.Context
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ type RunnableRuleContext[L <: HList] = Context { type PrefixType = Rule.Runnable[L] }
+
+ def runImpl[L <: HList: c.WeakTypeTag](c: RunnableRuleContext[L])()(scheme: c.Expr[Parser.DeliveryScheme[L]]): c.Expr[scheme.value.Result] = {
+ import c.universe._
+ val runCall = c.prefix.tree match {
+ case q"parboiled2.this.Rule.Runnable[$l]($ruleExpr)" ⇒ ruleExpr match {
+ case q"$p.$r" if p.tpe <:< typeOf[Parser] ⇒ q"val p = $p; p.__run[$l](p.$r)($scheme)"
+ case q"$p.$r($args)" if p.tpe <:< typeOf[Parser] ⇒ q"val p = $p; p.__run[$l](p.$r($args))($scheme)"
+ case q"$p.$r[$t]" if p.tpe <:< typeOf[Parser] ⇒ q"val p = $p; p.__run[$l](p.$r[$t])($scheme)"
+ case q"$p.$r[$t]" if p.tpe <:< typeOf[RuleX] ⇒ q"__run[$l]($ruleExpr)($scheme)"
+ case x ⇒ c.abort(x.pos, "Illegal `.run()` call base: " + x)
+ }
+ case x ⇒ c.abort(x.pos, "Illegal `Runnable.apply` call: " + x)
+ }
+ c.Expr[scheme.value.Result](runCall)
+ }
+
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ type ParserContext = Context { type PrefixType = Parser }
+
+ def ruleImpl[I <: HList: ctx.WeakTypeTag, O <: HList: ctx.WeakTypeTag](ctx: ParserContext)(r: ctx.Expr[Rule[I, O]]): ctx.Expr[Rule[I, O]] = {
+ val opTreeCtx = new OpTreeContext[ctx.type] { val c: ctx.type = ctx }
+ val opTree = opTreeCtx.OpTree(r.tree)
+ import ctx.universe._
+ val ruleName =
+ ctx.enclosingMethod match {
+ case DefDef(_, name, _, _, _, _) ⇒ name.decoded
+ case _ ⇒ ctx.abort(r.tree.pos, "`rule` can only be used from within a method")
+ }
+ reify {
+ ctx.Expr[RuleX](opTree.renderRule(ruleName)).splice.asInstanceOf[Rule[I, O]]
+ }
+ }
+}
\ No newline at end of file
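
A minimal usage sketch of the `DeliveryScheme` mechanism defined above, with a hypothetical `TwoDigits` parser (the `CharPredicate.Digit` predicate is assumed to come from the accompanying CharPredicate file): the default scheme delivers a `Try`, while importing `Parser.DeliveryScheme.Either` switches `run()` to an `Either[ParseError, ...]`.

import scala.util.{ Failure, Success }
import akka.parboiled2._

class TwoDigits(val input: ParserInput) extends Parser {
  def Digits: Rule1[String] = rule { capture(CharPredicate.Digit ~ CharPredicate.Digit) ~ EOI }
}

object DeliverySchemeDemo extends App {
  // default scheme (found via the DeliveryScheme companion): Try[String]
  val ok = new TwoDigits("42")
  ok.Digits.run() match {
    case Success(s)             ⇒ println(s"matched: $s")
    case Failure(e: ParseError) ⇒ println(ok.formatError(e))
    case Failure(other)         ⇒ throw other
  }

  // alternative scheme, selected by import: Either[ParseError, String]
  {
    import Parser.DeliveryScheme.Either
    val bad = new TwoDigits("4x")
    bad.Digits.run() match {
      case Right(s) ⇒ println(s"matched: $s")
      case Left(e)  ⇒ println(bad.formatError(e, showTraces = true))
    }
  }
}
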
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/ParserInput.scala b/akka-parsing/src/main/scala/akka/parboiled2/ParserInput.scala
new file mode 100644
index 0000000000..63f4acd1eb
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/ParserInput.scala
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import java.nio.charset.Charset
+import scala.annotation.tailrec
+import java.nio.ByteBuffer
+
+trait ParserInput {
+ /**
+ * Returns the character at the given (zero-based) index.
+ * Note: this method is hot and should be small and efficient.
+ * A range-check is not required for the parser to work correctly.
+ */
+ def charAt(ix: Int): Char
+
+ /**
+ * The number of characters in this input.
+ * Note: this method is hot and should be small and efficient.
+ */
+ def length: Int
+
+ /**
+ * Returns the characters between index `start` (inclusively) and `end` (exclusively) as a `String`.
+ */
+ def sliceString(start: Int, end: Int): String
+
+ /**
+ * Returns the characters between index `start` (inclusively) and `end` (exclusively) as an `Array[Char]`.
+ */
+ def sliceCharArray(start: Int, end: Int): Array[Char]
+
+ /**
+ * Gets the input line with the given number as a String.
+ * Note: the first line is line number one!
+ */
+ def getLine(line: Int): String
+}
+
+object ParserInput {
+ val Empty = apply(Array.empty[Byte])
+
+ implicit def apply(bytes: Array[Byte]): ByteArrayBasedParserInput = new ByteArrayBasedParserInput(bytes)
+ implicit def apply(string: String): StringBasedParserInput = new StringBasedParserInput(string)
+ implicit def apply(chars: Array[Char]): CharArrayBasedParserInput = new CharArrayBasedParserInput(chars)
+
+ abstract class DefaultParserInput extends ParserInput {
+ def getLine(line: Int): String = {
+ @tailrec def rec(ix: Int, lineStartIx: Int, lineNr: Int): String =
+ if (ix < length)
+ if (charAt(ix) == '\n')
+ if (lineNr < line) rec(ix + 1, ix + 1, lineNr + 1)
+ else sliceString(lineStartIx, ix)
+ else rec(ix + 1, lineStartIx, lineNr)
+ else if (lineNr == line) sliceString(lineStartIx, ix) else ""
+ rec(ix = 0, lineStartIx = 0, lineNr = 1)
+ }
+ }
+
+ /**
+ * ParserInput reading directly off a byte array.
+ * This avoids a separate decoding step but assumes that each byte represents exactly one character,
+ * which is encoded by ISO-8859-1!
+ * You can therefore use this ParserInput type only if you know that all input will be `ISO-8859-1`-encoded,
+ * or only contains 7-bit ASCII characters (which is a subset of ISO-8859-1)!
+ *
+ * Note that this ParserInput type will NOT work with general `UTF-8`-encoded input as this can contain
+ * character representations spanning multiple bytes. However, if you know that your input will only ever contain
+ * 7-bit ASCII characters (0x00-0x7F) then UTF-8 is fine, since the first 128 UTF-8 code points
+ * are encoded with a single byte that is identical to 7-bit ASCII and ISO-8859-1.
+ */
+ class ByteArrayBasedParserInput(bytes: Array[Byte]) extends DefaultParserInput {
+ def charAt(ix: Int) = (bytes(ix) & 0xFF).toChar
+ def length = bytes.length
+ def sliceString(start: Int, end: Int) = new String(bytes, start, end - start, `ISO-8859-1`)
+ def sliceCharArray(start: Int, end: Int) = `ISO-8859-1`.decode(ByteBuffer.wrap(java.util.Arrays.copyOfRange(bytes, start, end))).array()
+ }
+
+ class StringBasedParserInput(string: String) extends DefaultParserInput {
+ def charAt(ix: Int) = string.charAt(ix)
+ def length = string.length
+ def sliceString(start: Int, end: Int) = string.substring(start, end)
+ def sliceCharArray(start: Int, end: Int) = {
+ val chars = new Array[Char](end - start)
+ string.getChars(start, end, chars, 0)
+ chars
+ }
+ }
+
+ class CharArrayBasedParserInput(chars: Array[Char]) extends DefaultParserInput {
+ def charAt(ix: Int) = chars(ix)
+ def length = chars.length
+ def sliceString(start: Int, end: Int) = new String(chars, start, end - start)
+ def sliceCharArray(start: Int, end: Int) = java.util.Arrays.copyOfRange(chars, start, end)
+ }
+}
\ No newline at end of file
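
A short sketch of the three implicit input constructions above; note the ISO-8859-1/ASCII restriction of the byte-array flavour (expected output shown as comments):

import akka.parboiled2._

object ParserInputDemo extends App {
  val fromString: ParserInput = "café"                  // StringBasedParserInput, full Unicode
  val fromChars: ParserInput  = Array('a', 'b', 'c')    // CharArrayBasedParserInput
  // only safe because these bytes are ISO-8859-1 (here plain ASCII); general UTF-8
  // input must first be decoded into a String or an Array[Char]
  val fromBytes: ParserInput  = "abc".getBytes("ISO-8859-1")

  println(fromString.sliceString(0, 3))  // caf
  println(fromBytes.charAt(1))           // b
  println(fromChars.getLine(1))          // abc  (line numbering starts at 1)
}
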
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/Rule.scala b/akka-parsing/src/main/scala/akka/parboiled2/Rule.scala
new file mode 100644
index 0000000000..363f68f948
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/Rule.scala
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import scala.annotation.unchecked.uncheckedVariance
+import scala.reflect.internal.annotations.compileTimeOnly
+import akka.parboiled2.support._
+import akka.shapeless.HList
+
+sealed trait RuleX
+
+/**
+ * The general model of a parser rule.
+ * It is characterized by consuming a certain number of elements from the value stack (whose types are captured by the
+ * HList type parameter `I` for "Input") and itself pushing a certain number of elements onto the value stack (whose
+ * types are captured by the HList type parameter `O` for "Output").
+ *
+ * At runtime there are only two instances of this class which signal whether the rule has matched (or mismatched)
+ * at the current point in the input.
+ */
+sealed class Rule[-I <: HList, +O <: HList] extends RuleX {
+ // Note: we could model `Rule` as a value class, however, tests have shown that this doesn't result in any measurable
+ // performance benefit and, in addition, comes with other drawbacks (like generated bridge methods)
+
+ /**
+ * Concatenates this rule with the given other one.
+ * The resulting rule type is computed on a type-level.
+ * Here is an illustration (using an abbreviated HList notation):
+ * Rule[, A] ~ Rule[, B] = Rule[, A:B]
+ * Rule[A:B:C, D:E:F] ~ Rule[F, G:H] = Rule[A:B:C, D:E:G:H]
+ * Rule[A, B:C] ~ Rule[D:B:C, E:F] = Rule[D:A, E:F]
+ */
+ @compileTimeOnly("Calls to `~` must be inside `rule` macro")
+ def ~[I2 <: HList, O2 <: HList](that: Rule[I2, O2])(implicit i: TailSwitch[I2, O @uncheckedVariance, I @uncheckedVariance],
+ o: TailSwitch[O @uncheckedVariance, I2, O2]): Rule[i.Out, o.Out] = `n/a`
+
+ /**
+ * Combines this rule with the given other one in a way that the resulting rule matches if this rule matches
+ * or the other one matches. If this rule doesn't match the parser is reset and the given alternative tried.
+ * This operator therefore implements the "ordered choice" PEG combinator.
+ */
+ @compileTimeOnly("Calls to `|` must be inside `rule` macro")
+ def |[I2 <: I, O2 >: O <: HList](that: Rule[I2, O2]): Rule[I2, O2] = `n/a`
+
+ /**
+ * Creates a "negative syntactic predicate", i.e. a rule that matches only if this rule mismatches and vice versa.
+ * The resulting rule doesn't cause the parser to make any progress (i.e. match any input) and also clears out all
+ * effects that the underlying rule might have had on the value stack.
+ */
+ @compileTimeOnly("Calls to `unary_!` must be inside `rule` macro")
+ def unary_!(): Rule0 = `n/a`
+}
+
+/**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+object Rule extends Rule0 {
+ /**
+ * THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing!
+ */
+ implicit class Runnable[L <: HList](rule: RuleN[L]) {
+ def run()(implicit scheme: Parser.DeliveryScheme[L]): scheme.Result = macro ParserMacros.runImpl[L]
+ }
+}
+
+abstract class RuleDSL
+ extends RuleDSLBasics
+ with RuleDSLCombinators
+ with RuleDSLActions
\ No newline at end of file
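
To make the type-level illustration of `~` above concrete, a hypothetical parser (again assuming `CharPredicate.Digit`/`Alpha` from the accompanying CharPredicate file) in which two `Rule1`s concatenate to a `Rule2`:

import akka.parboiled2._

class ConcatTypes(val input: ParserInput) extends Parser {
  def IntValue: Rule1[Int] = rule { capture(CharPredicate.Digit) ~> ((s: String) ⇒ s.toInt) }
  def Name: Rule1[String]  = rule { capture(oneOrMore(CharPredicate.Alpha)) }

  // Rule[HNil, Int :: HNil] ~ Rule[HNil, String :: HNil] = Rule[HNil, Int :: String :: HNil]
  def Pair: Rule2[Int, String] = rule { IntValue ~ Name }
}
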
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLActions.scala b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLActions.scala
new file mode 100644
index 0000000000..39bdd85da2
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLActions.scala
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import scala.reflect.internal.annotations.compileTimeOnly
+import akka.shapeless.ops.hlist.Prepend
+import akka.parboiled2.support._
+import akka.shapeless._
+
+trait RuleDSLActions {
+
+ /**
+ * Pushes the input text matched by its inner rule onto the value stack
+ * after its inner rule has been run successfully (and only then).
+ */
+ @compileTimeOnly("Calls to `capture` must be inside `rule` macro")
+ def capture[I <: HList, O <: HList](r: Rule[I, O])(implicit p: Prepend[O, String :: HNil]): Rule[I, p.Out] = `n/a`
+
+ /**
+ * Implements a semantic predicate. If the argument expression evaluates to `true` the created
+ * rule matches otherwise it doesn't.
+ */
+ @compileTimeOnly("Calls to `test` must be inside `rule` macro")
+ def test(condition: Boolean): Rule0 = `n/a`
+
+ /**
+ * Runs the given block / expression / action function.
+ * A `run` rule can have several shapes, depending on its argument type. If the `arg` evaluates to
+ *
+ * - a rule (i.e. has type `R <: Rule[_, _]`) the result type of `run` is this rule's type (i.e. `R`) and the
+ * produced rule is immediately executed.
+ *
+ * - a function with 1 to 5 parameters these parameters are mapped against the top of the value stack, popped
+ * and the function executed. Thereby the function behaves just like an action function for the `~>` operator,
+ * i.e. if it produces a Unit value this result is simply dropped. HList results are pushed onto the value stack
+ * (all their elements individually), rule results are immediately executed and other result values are pushed
+ * onto the value stack as a single element.
+ *
+ * - a function with one HList parameter the behavior is similar to the previous case with the difference that the
+ * elements of this parameter HList are mapped against the value stack top. This allows for consumption of an
+ * arbitrary number of value stack elements.
+ *
+ * - any other value the result type of `run` is an always succeeding `Rule0`.
+ *
+ * NOTE: Even though the block is not a call-by-name parameter it will be executed
+ * for every rule application anew! (Since the expression is directly transplanted
+ * into the rule method by the `rule` macro.)
+ */
+ @compileTimeOnly("Calls to `run` must be inside `rule` macro")
+ def run[T](arg: T)(implicit rr: RunResult[T]): rr.Out = `n/a`
+
+ /**
+ * Pushes the given value onto the value stack.
+ * - if `T` is `Unit` nothing is pushed (i.e. `push` with a block/expression evaluating to `Unit` is identical to `run`)
+ * - if `T <: HList` all values of the HList are pushed as individual elements
+ * - otherwise a single value of type `T` is pushed.
+ */
+ @compileTimeOnly("Calls to `push` must be inside `rule` macro")
+ def push[T](value: T)(implicit h: HListable[T]): RuleN[h.Out] = `n/a`
+
+ /**
+ * Drops one or more values from the top of the value stack.
+ * E.g. `drop[Int]` will drop the top ``Int`` value and `drop[Int :: String :: HNil]` will drop the top two values,
+ * which must be an ``Int`` underneath a ``String`` (the string being the top stack element).
+ */
+ @compileTimeOnly("Calls to `drop` must be inside `rule` macro")
+ def drop[T](implicit h: HListable[T]): PopRule[h.Out] = `n/a`
+
+ @compileTimeOnly("Calls to `rule2ActionOperator` must be inside `rule` macro")
+ implicit def rule2ActionOperator[I <: HList, O <: HList](r: Rule[I, O])(implicit ops: ActionOps[I, O]): ActionOperator[I, O, ops.Out] = `n/a`
+ sealed trait ActionOperator[I <: HList, O <: HList, Ops] {
+ def ~> : Ops
+ }
+}
\ No newline at end of file
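
A hedged sketch of the action primitives documented above (`~>` via `rule2ActionOperator`, `run`, `push`, `drop`, `test`), using a hypothetical parser and the assumed `CharPredicate.Digit` predicate:

import akka.parboiled2._
import akka.shapeless._

class Actions(val input: ParserInput) extends Parser {
  // capture + `~>`: pops the captured String and pushes an Int
  def Number: Rule1[Int] = rule { capture(oneOrMore(CharPredicate.Digit)) ~> ((s: String) ⇒ s.toInt) }

  // push: a Unit value pushes nothing, an HList pushes its elements, anything else one value
  def TwoValues: Rule2[Int, String] = rule { push(42 :: "forty-two" :: HNil) }

  // run with a side-effecting (non-rule) argument yields an always succeeding Rule0
  def Logged: Rule0 = rule { Number ~ run(println(s"digits end at cursor $cursor")) ~ drop[Int] }

  // test: semantic predicate on an arbitrary Boolean expression
  def NonEmptyInput: Rule0 = rule { test(input.length > 0) }
}
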
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLBasics.scala b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLBasics.scala
new file mode 100644
index 0000000000..2cddaf43be
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLBasics.scala
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import scala.reflect.internal.annotations.compileTimeOnly
+import akka.parboiled2.support._
+import akka.shapeless.HList
+
+trait RuleDSLBasics {
+
+ /**
+ * Matches the given single character.
+ */
+ @compileTimeOnly("Calls to `ch` must be inside `rule` macro")
+ implicit def ch(c: Char): Rule0 = `n/a`
+
+ /**
+ * Matches the given string of characters.
+ */
+ @compileTimeOnly("Calls to `str` must be inside `rule` macro")
+ implicit def str(s: String): Rule0 = `n/a`
+
+ /**
+ * Matches any (single) character matched by the given `CharPredicate`.
+ */
+ @compileTimeOnly("Calls to `predicate` must be inside `rule` macro")
+ implicit def predicate(p: CharPredicate): Rule0 = `n/a`
+
+ /**
+ * Matches any of the given map's keys and pushes the respective value upon
+ * a successful match.
+ */
+ @compileTimeOnly("Calls to `valueMap` must be inside `rule` macro")
+ implicit def valueMap[T](m: Map[String, T])(implicit h: HListable[T]): RuleN[h.Out] = `n/a`
+
+ /**
+ * Matches any single one of the given characters.
+ */
+ @compileTimeOnly("Calls to `anyOf` must be inside `rule` macro")
+ def anyOf(chars: String): Rule0 = `n/a`
+
+ /**
+ * Matches any single character except the ones in the given string and except EOI.
+ */
+ @compileTimeOnly("Calls to `noneOf` must be inside `rule` macro")
+ def noneOf(chars: String): Rule0 = `n/a`
+
+ /**
+ * Matches the given single character case insensitively.
+ * Note: the given character must be specified in lower-case!
+ * This requirement is currently NOT enforced!
+ */
+ @compileTimeOnly("Calls to `ignoreCase` must be inside `rule` macro")
+ def ignoreCase(c: Char): Rule0 = `n/a`
+
+ /**
+ * Matches the given string of characters case insensitively.
+ * Note: the given string must be specified in all lower-case!
+ * This requirement is currently NOT enforced!
+ */
+ @compileTimeOnly("Calls to `ignoreCase` must be inside `rule` macro")
+ def ignoreCase(s: String): Rule0 = `n/a`
+
+ /**
+ * Matches any character except EOI.
+ */
+ @compileTimeOnly("Calls to `ANY` must be inside `rule` macro")
+ def ANY: Rule0 = `n/a`
+
+ /**
+ * Matches the EOI (end-of-input) character.
+ */
+ def EOI: Char = akka.parboiled2.EOI
+
+ /**
+ * Matches no character (i.e. doesn't cause the parser to make any progress) but always succeeds (as a rule).
+ */
+ def MATCH: Rule0 = Rule
+
+ /**
+ * A rule that always fails.
+ */
+ def MISMATCH[I <: HList, O <: HList]: Rule[I, O] = null
+ def MISMATCH0: Rule0 = MISMATCH
+
+ @compileTimeOnly("Calls to `str2CharRangeSupport` must be inside `rule` macro")
+ implicit def str2CharRangeSupport(s: String): CharRangeSupport = `n/a`
+ sealed trait CharRangeSupport {
+ def -(other: String): Rule0
+ }
+}
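
A small sketch of the basic matchers above in use (implicit `ch`/`str`, character ranges via `str2CharRangeSupport`, `anyOf`/`noneOf`, `ignoreCase` and `valueMap`); `CharPredicate.HexDigit` is assumed from the accompanying CharPredicate file:

import akka.parboiled2._

class Basics(val input: ParserInput) extends Parser {
  def Hex: Rule0      = rule { "0x" ~ oneOrMore(CharPredicate.HexDigit) }  // implicit `str` and `ch`
  def Letter: Rule0   = rule { "a" - "z" | "A" - "Z" }                     // str2CharRangeSupport
  def Sign: Rule0     = rule { anyOf("+-") }
  def NotQuote: Rule0 = rule { noneOf("\"") }                              // also excludes EOI
  def Keyword: Rule0  = rule { ignoreCase("select") }                      // argument must be lower-case
  def Bool: Rule1[Boolean] = rule { valueMap(Map("true" -> true, "false" -> false)) }
}
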
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLCombinators.scala b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLCombinators.scala
new file mode 100644
index 0000000000..4be8d36cde
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLCombinators.scala
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import scala.reflect.internal.annotations.compileTimeOnly
+import scala.collection.immutable
+import akka.parboiled2.support._
+import akka.shapeless._
+
+trait RuleDSLCombinators {
+
+ /**
+ * Runs its inner rule and succeeds even if the inner rule doesn't.
+ * Resulting rule type is
+ * Rule0 if r == Rule0
+ * Rule1[Option[T]] if r == Rule1[T]
+ * Rule[I, O] if r == Rule[I, O <: I] // so called "reduction", which leaves the value stack unchanged on a type level
+ */
+ @compileTimeOnly("Calls to `optional` must be inside `rule` macro")
+ def optional[I <: HList, O <: HList](r: Rule[I, O])(implicit o: Lifter[Option, I, O]): Rule[o.In, o.Out] = `n/a`
+
+ /**
+ * Runs its inner rule until it fails, always succeeds.
+ * Resulting rule type is
+ * Rule0 if r == Rule0
+ * Rule1[Seq[T]] if r == Rule1[T]
+ * Rule[I, O] if r == Rule[I, O <: I] // so called "reduction", which leaves the value stack unchanged on a type level
+ */
+ @compileTimeOnly("Calls to `zeroOrMore` must be inside `rule` macro")
+ def zeroOrMore[I <: HList, O <: HList](r: Rule[I, O])(implicit s: Lifter[immutable.Seq, I, O]): Rule[s.In, s.Out] with Repeated = `n/a`
+
+ /**
+ * Runs its inner rule until it fails, succeeds if its inner rule succeeded at least once.
+ * Resulting rule type is
+ * Rule0 if r == Rule0
+ * Rule1[Seq[T]] if r == Rule1[T]
+ * Rule[I, O] if r == Rule[I, O <: I] // so called "reduction", which leaves the value stack unchanged on a type level
+ */
+ @compileTimeOnly("Calls to `oneOrMore` must be inside `rule` macro")
+ def oneOrMore[I <: HList, O <: HList](r: Rule[I, O])(implicit s: Lifter[immutable.Seq, I, O]): Rule[s.In, s.Out] with Repeated = `n/a`
+
+ /**
+ * Runs its inner rule but resets the parser (cursor and value stack) afterwards,
+ * succeeds only if its inner rule succeeded.
+ */
+ @compileTimeOnly("Calls to `&` must be inside `rule` macro")
+ def &(r: Rule[_, _]): Rule0 = `n/a`
+
+ /**
+ * Allows creation of a sub-parser and running of one of its rules as part of the current parsing process.
+ * The sub-parser will start parsing at the current input position and the outer parser (this parser)
+ * will continue where the sub-parser stopped.
+ */
+ @compileTimeOnly("Calls to `runSubParser` must be inside `rule` macro")
+ def runSubParser[I <: HList, O <: HList](f: ParserInput ⇒ Rule[I, O]): Rule[I, O] = `n/a`
+
+ @compileTimeOnly("Calls to `int2NTimes` must be inside `rule` macro")
+ implicit def int2NTimes(i: Int): NTimes = `n/a`
+ @compileTimeOnly("Calls to `range2NTimes` must be inside `rule` macro")
+ implicit def range2NTimes(range: Range): NTimes = `n/a`
+ sealed trait NTimes {
+ /**
+ * Repeats the given sub rule `r` the given number of times.
+ * Both bounds of the range must be non-negative and the upper bound must be >= the lower bound.
+ * If the upper bound is zero the rule is equivalent to `MATCH`.
+ *
+ * Resulting rule type is
+ * Rule0 if r == Rule0
+ * Rule1[Seq[T]] if r == Rule1[T]
+ * Rule[I, O] if r == Rule[I, O <: I] // so called "reduction", which leaves the value stack unchanged on a type level
+ */
+ @compileTimeOnly("Calls to `times` must be inside `rule` macro")
+ def times[I <: HList, O <: HList](r: Rule[I, O])(implicit s: Lifter[immutable.Seq, I, O]): Rule[s.In, s.Out] with Repeated
+ }
+
+ // phantom type for WithSeparatedBy pimp
+ trait Repeated
+
+ @compileTimeOnly("Calls to `rule2WithSeparatedBy` constructor must be inside `rule` macro")
+ implicit def rule2WithSeparatedBy[I <: HList, O <: HList](r: Rule[I, O] with Repeated): WithSeparatedBy[I, O] = `n/a`
+ trait WithSeparatedBy[I <: HList, O <: HList] {
+ def separatedBy(separator: Rule0): Rule[I, O] = `n/a`
+ }
+}
\ No newline at end of file
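
A sketch of the resulting rule types of the combinators above, again with a hypothetical parser and the assumed `CharPredicate.Digit`:

import akka.parboiled2._

class Combinators(val input: ParserInput) extends Parser {
  def Digits: Rule1[String] = rule { capture(oneOrMore(CharPredicate.Digit)) }

  def OptSign: Rule1[Option[String]]   = rule { optional(capture(anyOf("+-"))) }
  def CsvInts: Rule1[Seq[String]]      = rule { zeroOrMore(Digits).separatedBy(",") }
  def ExactlyThree: Rule1[Seq[String]] = rule { 3.times(Digits) }
  def TwoToFour: Rule1[Seq[String]]    = rule { (2 to 4).times(Digits) }

  // `&` runs Digits as lookahead only: cursor and value stack are reset afterwards
  def StartsWithDigit: Rule0 = rule { &(Digits) ~ ANY }
}
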
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/ValueStack.scala b/akka-parsing/src/main/scala/akka/parboiled2/ValueStack.scala
new file mode 100644
index 0000000000..a9141bd0b5
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/ValueStack.scala
@@ -0,0 +1,255 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+import scala.annotation.tailrec
+import akka.shapeless._
+
+/**
+ * A mutable untyped stack of values.
+ * In most cases you won't have to access its API directly since parboiled2's DSL
+ * should allow you a safer and easier way to interact with the stack.
+ * However, in some cases, when you know what you are doing, direct access can be helpful.
+ */
+class ValueStack private[parboiled2] (initialSize: Int, maxSize: Int) extends Iterable[Any] {
+
+ private[this] var buffer = new Array[Any](initialSize)
+ private[this] var _size = 0
+
+ private[parboiled2] def size_=(newSize: Int): Unit = _size = newSize
+
+ /**
+ * The number of elements currently on the stack.
+ */
+ override def size: Int = _size
+
+ /**
+ * True if no elements are currently on the stack.
+ */
+ override def isEmpty: Boolean = _size == 0
+
+ /**
+ * Removes all elements from the stack.
+ */
+ def clear(): Unit = _size = 0
+
+ /**
+ * Puts the given value onto the stack.
+ * Throws a `ValueStackOverflowException` if the stack has no more space available.
+ */
+ def push(value: Any): Unit = {
+ val oldSize = _size
+ val newSize = oldSize + 1
+ ensureSize(newSize)
+ buffer(oldSize) = value
+ _size = newSize
+ }
+
+ /**
+ * Puts the given HList of values onto the stack.
+ * Throws a `ValueStackOverflowException` if the stack has no more space available.
+ */
+ @tailrec final def pushAll(hlist: HList): Unit =
+ hlist match {
+ case akka.shapeless.::(head, tail) ⇒
+ push(head)
+ pushAll(tail)
+ case HNil ⇒
+ }
+
+ /**
+ * Inserts the given value into the stack `down` elements below the current
+ * top element. `insert(0, 'x')` is therefore equal to `push('x')`.
+ * Throws a `ValueStackOverflowException` if the stack has no more space available.
+ * Throws a `ValueStackUnderflowException` if `down > size`.
+ * Throws an `IllegalArgumentException` if `down` is negative.
+ */
+ def insert(down: Int, value: Any): Unit =
+ math.signum(down) match {
+ case -1 ⇒ throw new IllegalArgumentException("`down` must not be negative")
+ case 0 ⇒ push(value)
+ case 1 ⇒
+ if (down > _size) throw new ValueStackUnderflowException
+ val newSize = _size + 1
+ ensureSize(newSize)
+ val targetIx = _size - down
+ System.arraycopy(buffer, targetIx, buffer, targetIx + 1, down)
+ buffer(targetIx) = value
+ _size = newSize
+ }
+
+ /**
+ * Removes the top element from the stack and returns it.
+ * Throws a `ValueStackUnderflowException` if the stack is empty.
+ */
+ def pop(): Any =
+ if (_size > 0) {
+ val newSize = _size - 1
+ _size = newSize
+ buffer(newSize)
+ } else throw new ValueStackUnderflowException
+
+ /**
+ * Removes the element `down` elements below the current top element from the stack
+ * and returns it. `pullOut(0)` is therefore equal to `pop()`.
+ * Throws a `ValueStackUnderflowException` if `down >= size`.
+ * Throws an `IllegalArgumentException` if `down` is negative.
+ */
+ def pullOut(down: Int): Any =
+ math.signum(down) match {
+ case -1 ⇒ throw new IllegalArgumentException("`down` must not be negative")
+ case 0 ⇒ pop()
+ case 1 ⇒
+ if (down >= _size) throw new ValueStackUnderflowException
+ val newSize = _size - 1
+ val targetIx = newSize - down
+ val result = buffer(targetIx)
+ System.arraycopy(buffer, targetIx + 1, buffer, targetIx, down)
+ _size = newSize
+ result
+ }
+
+ /**
+ * Returns the top element without removing it.
+ * Throws a `ValueStackUnderflowException` if the stack is empty.
+ */
+ def peek: Any =
+ if (_size > 0) buffer(_size - 1)
+ else throw new ValueStackUnderflowException
+
+ /**
+ * Returns the element `down` elements below the current top element without removing it.
+ * `peek(0)` is therefore equal to `peek`.
+ * Throws a `ValueStackUnderflowException` if `down >= size`.
+ * Throws an `IllegalArgumentException` if `down` is negative.
+ */
+ def peek(down: Int): Any =
+ math.signum(down) match {
+ case -1 ⇒ throw new IllegalArgumentException("`down` must not be negative")
+ case 0 ⇒ peek
+ case 1 ⇒
+ if (down >= _size) throw new ValueStackUnderflowException
+ else buffer(_size - down - 1)
+ }
+
+ /**
+ * Replaces the element `down` elements below the current top element with the given one.
+ * Throws a `ValueStackUnderflowException` if `down >= size`.
+ * Throws an `IllegalArgumentException` if `down` is negative.
+ */
+ def poke(down: Int, value: Any): Unit = {
+ if (down >= _size) throw new ValueStackUnderflowException
+ require(down >= 0, "`down` must be >= 0")
+ buffer(_size - down - 1) = value
+ }
+
+ /**
+ * Swaps the top 2 stack elements.
+ * Throws a `ValueStackUnderflowException` if `size < 2`.
+ */
+ def swap(): Unit = {
+ if (_size < 2) throw new ValueStackUnderflowException
+ val temp = buffer(_size - 1)
+ buffer(_size - 1) = buffer(_size - 2)
+ buffer(_size - 2) = temp
+ }
+
+ /**
+ * Swaps the top 3 stack elements.
+ * Throws a `ValueStackUnderflowException` if `size < 3`.
+ */
+ def swap3(): Unit = {
+ if (_size < 3) throw new ValueStackUnderflowException
+ val temp = buffer(_size - 1)
+ buffer(_size - 1) = buffer(_size - 3)
+ buffer(_size - 3) = temp
+ }
+
+ /**
+ * Swaps the top 4 stack elements.
+ * Throws a `ValueStackUnderflowException` if `size < 4`.
+ */
+ def swap4(): Unit = {
+ if (_size < 4) throw new ValueStackUnderflowException
+ var temp = buffer(_size - 1)
+ buffer(_size - 1) = buffer(_size - 4)
+ buffer(_size - 4) = temp
+ temp = buffer(_size - 2)
+ buffer(_size - 2) = buffer(_size - 3)
+ buffer(_size - 3) = temp
+ }
+
+ /**
+ * Swaps the top 5 stack elements.
+ * Throws a `ValueStackUnderflowException` if `size < 5`.
+ */
+ def swap5(): Unit = {
+ if (_size < 5) throw new ValueStackUnderflowException
+ var temp = buffer(_size - 1)
+ buffer(_size - 1) = buffer(_size - 5)
+ buffer(_size - 5) = temp
+ temp = buffer(_size - 2)
+ buffer(_size - 2) = buffer(_size - 4)
+ buffer(_size - 4) = temp
+ }
+
+ /**
+ * Returns all current stack elements as a new array.
+ */
+ def toArray: Array[Any] = {
+ val a = new Array[Any](_size)
+ System.arraycopy(buffer, 0, a, 0, _size)
+ a
+ }
+
+ /**
+ * Copies all elements between the given `start` (inclusive) and `end` (exclusive)
+ * indices into an HList that is prepended to the given tail.
+ * Throws an `IllegalArgumentException` if `start < 0 || start > end`.
+ * Throws a `ValueStackUnderflowException` if `end > size`.
+ */
+ @tailrec final def toHList[L <: HList](start: Int = 0, end: Int = _size, prependTo: HList = HNil): L = {
+ require(0 <= start && start <= end, "`start` must be >= 0 and <= `end`")
+ if (start == end) prependTo.asInstanceOf[L]
+ else toHList[L](start, end - 1, buffer(end - 1) :: prependTo)
+ }
+
+ /**
+ * Creates a string representation of the current value stack contents.
+ * Mostly useful for debugging.
+ */
+ def show: String = mkString("[", ", ", "]")
+
+ /**
+ * Returns an iterator that iterates over a *snapshot* of the stack elements
+ * at the time of this method call. I.e. subsequent mutations are not visible
+ * to the iterator.
+ */
+ def iterator: Iterator[Any] = toArray.iterator
+
+ private def ensureSize(requiredSize: Int): Unit =
+ if (buffer.length < requiredSize)
+ if (requiredSize <= maxSize) {
+ val newSize = math.min(math.max(buffer.length * 2, requiredSize), maxSize)
+ val newBuffer = new Array[Any](newSize)
+ System.arraycopy(buffer, 0, newBuffer, 0, _size)
+ buffer = newBuffer
+ } else throw new ValueStackOverflowException
+}
+
+class ValueStackOverflowException extends RuntimeException
+class ValueStackUnderflowException extends RuntimeException
\ No newline at end of file
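
A hedged sketch of the stack semantics documented above. The constructor is package-private, so the sketch assumes it lives in the `akka.parboiled2` package; expected values are shown as comments:

package akka.parboiled2

import akka.shapeless._

object ValueStackDemo extends App {
  val stack = new ValueStack(initialSize = 4, maxSize = 16)

  stack.push(1)
  stack.pushAll("a" :: 2.5 :: HNil)    // pushes "a", then 2.5
  println(stack.show)                  // [1, a, 2.5]

  stack.insert(2, 0)                   // inserts 0 two elements below the top
  println(stack.show)                  // [1, 0, a, 2.5]

  println(stack.pullOut(1))            // a   (removes the element one below the top)
  println(stack.peek(1))               // 0
  println(stack.toHList[Int :: Int :: Double :: HNil]())  // 1 :: 0 :: 2.5 :: HNil
}
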
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/package.scala b/akka-parsing/src/main/scala/akka/parboiled2/package.scala
new file mode 100644
index 0000000000..9068f08531
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/package.scala
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka
+
+import akka.shapeless._
+import java.nio.charset.Charset
+
+package object parboiled2 {
+
+ type Rule0 = RuleN[HNil]
+ type Rule1[T] = RuleN[T :: HNil]
+ type Rule2[A, B] = RuleN[A :: B :: HNil]
+ type RuleN[L <: HList] = Rule[HNil, L]
+ type PopRule[L <: HList] = Rule[L, HNil]
+
+ val EOI = '\uFFFF'
+
+ val UTF8 = Charset.forName("UTF-8")
+ val `ISO-8859-1` = Charset.forName("ISO-8859-1")
+
+ val EmptyArray = Array.empty[Any]
+}
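
A compile-time sketch spelling out what the aliases above expand to (each `implicitly` only compiles because the two types are equal after dealiasing):

import akka.parboiled2._
import akka.shapeless._

object RuleAliasDemo {
  implicitly[Rule0 =:= Rule[HNil, HNil]]
  implicitly[Rule1[Int] =:= Rule[HNil, Int :: HNil]]
  implicitly[Rule2[Int, String] =:= Rule[HNil, Int :: String :: HNil]]
  implicitly[PopRule[Int :: HNil] =:= Rule[Int :: HNil, HNil]]
}
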
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/ActionOps.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/ActionOps.scala
new file mode 100644
index 0000000000..79c47634ab
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/support/ActionOps.scala
@@ -0,0 +1,233 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2.support
+
+import akka.shapeless._
+import akka.parboiled2.Rule
+import akka.shapeless.ops.hlist.ReversePrepend
+
+// format: OFF
+
+// provides the supported `~>` "overloads" for rule of type `Rule[I, O]` as `Out`
+// as a phantom type, which is only used for rule DSL typing
+sealed trait ActionOps[I <: HList, O <: HList] { type Out }
+object ActionOps {
+ private type SJoin[I <: HList, O <: HList, R] = Join[I, HNil, O, R]
+
+ implicit def ops0[I <: HList, O <: HNil]: ActionOps[I, O] { type Out = Ops0[I] } = `n/a`
+ sealed trait Ops0[I <: HList] {
+ def apply[R](f: () ⇒ R)(implicit j: SJoin[I, HNil, R], c: FCapture[() ⇒ R]): Rule[j.In, j.Out]
+ def apply[Z, R](f: Z ⇒ R)(implicit j: SJoin[Z :: I, HNil, R], c: FCapture[Z ⇒ R]): Rule[j.In, j.Out]
+ def apply[Y, Z, R](f: (Y, Z) ⇒ R)(implicit j: SJoin[Y :: Z :: I, HNil, R], c: FCapture[(Y, Z) ⇒ R]): Rule[j.In, j.Out]
+ def apply[X, Y, Z, R](f: (X, Y, Z) ⇒ R)(implicit j: SJoin[X :: Y :: Z :: I, HNil, R], c: FCapture[(X, Y, Z) ⇒ R]): Rule[j.In, j.Out]
+ def apply[W, X, Y, Z, R](f: (W, X, Y, Z) ⇒ R)(implicit j: SJoin[W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(W, X, Y, Z) ⇒ R]): Rule[j.In, j.Out]
+ def apply[V, W, X, Y, Z, R](f: (V, W, X, Y, Z) ⇒ R)(implicit j: SJoin[V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(V, W, X, Y, Z) ⇒ R]): Rule[j.In, j.Out]
+ def apply[U, V, W, X, Y, Z, R](f: (U, V, W, X, Y, Z) ⇒ R)(implicit j: SJoin[U :: V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(U, V, W, X, Y, Z) ⇒ R]): Rule[j.In, j.Out]
+ def apply[T, U, V, W, X, Y, Z, R](f: (T, U, V, W, X, Y, Z) ⇒ R)(implicit j: SJoin[T :: U :: V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(T, U, V, W, X, Y, Z) ⇒ R]): Rule[j.In, j.Out]
+ def apply[S, T, U, V, W, X, Y, Z, R](f: (S, T, U, V, W, X, Y, Z) ⇒ R)(implicit j: SJoin[S :: T :: U :: V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(S, T, U, V, W, X, Y, Z) ⇒ R]): Rule[j.In, j.Out]
+ def apply[P, S, T, U, V, W, X, Y, Z, R](f: (P, S, T, U, V, W, X, Y, Z) ⇒ R)(implicit j: SJoin[P :: S :: T :: U :: V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(P, S, T, U, V, W, X, Y, Z) ⇒ R]): Rule[j.In, j.Out]
+ }
+ implicit def ops1[I <: HList, A]: ActionOps[I, A :: HNil] { type Out = Ops1[I, A] } = `n/a`
+ sealed trait Ops1[I <: HList, A] {
+ def apply[R](f: () ⇒ R)(implicit j: SJoin[I, A :: HNil, R], c: FCapture[() ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: A ⇒ R)(implicit j: SJoin[I, HNil, R], c: FCapture[A ⇒ R]): Rule[j.In, j.Out]
+ def apply[Z, R](f: (Z, A) ⇒ R)(implicit j: SJoin[Z :: I, HNil, R], c: FCapture[(Z, A) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Y, Z, R](f: (Y, Z, A) ⇒ R)(implicit j: SJoin[Y :: Z :: I, HNil, R], c: FCapture[(Y, Z, A) ⇒ R]): Rule[j.In, j.Out]
+ def apply[X, Y, Z, R](f: (X, Y, Z, A) ⇒ R)(implicit j: SJoin[X :: Y :: Z :: I, HNil, R], c: FCapture[(X, Y, Z, A) ⇒ R]): Rule[j.In, j.Out]
+ def apply[W, X, Y, Z, R](f: (W, X, Y, Z, A) ⇒ R)(implicit j: SJoin[W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(W, X, Y, Z, A) ⇒ R]): Rule[j.In, j.Out]
+ def apply[V, W, X, Y, Z, R](f: (V, W, X, Y, Z, A) ⇒ R)(implicit j: SJoin[V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(V, W, X, Y, Z, A) ⇒ R]): Rule[j.In, j.Out]
+ def apply[U, V, W, X, Y, Z, R](f: (U, V, W, X, Y, Z, A) ⇒ R)(implicit j: SJoin[U :: V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(U, V, W, X, Y, Z, A) ⇒ R]): Rule[j.In, j.Out]
+ def apply[T, U, V, W, X, Y, Z, R](f: (T, U, V, W, X, Y, Z, A) ⇒ R)(implicit j: SJoin[T :: U :: V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(T, U, V, W, X, Y, Z, A) ⇒ R]): Rule[j.In, j.Out]
+ def apply[S, T, U, V, W, X, Y, Z, R](f: (S, T, U, V, W, X, Y, Z, A) ⇒ R)(implicit j: SJoin[S :: T :: U :: V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(S, T, U, V, W, X, Y, Z, A) ⇒ R]): Rule[j.In, j.Out]
+ }
+ implicit def ops2[I <: HList, A, B]: ActionOps[I, A :: B :: HNil] { type Out = Ops2[I, A, B] } = `n/a`
+ sealed trait Ops2[I <: HList, A, B] {
+ def apply[R](f: () ⇒ R)(implicit j: SJoin[I, A :: B :: HNil, R], c: FCapture[() ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: B ⇒ R)(implicit j: SJoin[I, A :: HNil, R], c: FCapture[B ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (A, B) ⇒ R)(implicit j: SJoin[I, HNil, R], c: FCapture[(A, B) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Z, R](f: (Z, A, B) ⇒ R)(implicit j: SJoin[Z :: I, HNil, R], c: FCapture[(Z, A, B) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Y, Z, R](f: (Y, Z, A, B) ⇒ R)(implicit j: SJoin[Y :: Z :: I, HNil, R], c: FCapture[(Y, Z, A, B) ⇒ R]): Rule[j.In, j.Out]
+ def apply[X, Y, Z, R](f: (X, Y, Z, A, B) ⇒ R)(implicit j: SJoin[X :: Y :: Z :: I, HNil, R], c: FCapture[(X, Y, Z, A, B) ⇒ R]): Rule[j.In, j.Out]
+ def apply[W, X, Y, Z, R](f: (W, X, Y, Z, A, B) ⇒ R)(implicit j: SJoin[W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(W, X, Y, Z, A, B) ⇒ R]): Rule[j.In, j.Out]
+ def apply[V, W, X, Y, Z, R](f: (V, W, X, Y, Z, A, B) ⇒ R)(implicit j: SJoin[V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(V, W, X, Y, Z, A, B) ⇒ R]): Rule[j.In, j.Out]
+ def apply[U, V, W, X, Y, Z, R](f: (U, V, W, X, Y, Z, A, B) ⇒ R)(implicit j: SJoin[U :: V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(U, V, W, X, Y, Z, A, B) ⇒ R]): Rule[j.In, j.Out]
+ def apply[T, U, V, W, X, Y, Z, R](f: (T, U, V, W, X, Y, Z, A, B) ⇒ R)(implicit j: SJoin[T :: U :: V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(T, U, V, W, X, Y, Z, A, B) ⇒ R]): Rule[j.In, j.Out]
+ }
+ implicit def ops3[I <: HList, A, B, C]: ActionOps[I, A :: B :: C :: HNil] { type Out = Ops3[I, A, B, C] } = `n/a`
+ sealed trait Ops3[I <: HList, A, B, C] {
+ def apply[R](f: () ⇒ R)(implicit j: SJoin[I, A :: B :: C :: HNil, R], c: FCapture[() ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: C ⇒ R)(implicit j: SJoin[I, A :: B :: HNil, R], c: FCapture[C ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (B, C) ⇒ R)(implicit j: SJoin[I, A :: HNil, R], c: FCapture[(B, C) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (A, B, C) ⇒ R)(implicit j: SJoin[I, HNil, R], c: FCapture[(A, B, C) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Z, R](f: (Z, A, B, C) ⇒ R)(implicit j: SJoin[Z :: I, HNil, R], c: FCapture[(Z, A, B, C) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Y, Z, R](f: (Y, Z, A, B, C) ⇒ R)(implicit j: SJoin[Y :: Z :: I, HNil, R], c: FCapture[(Y, Z, A, B, C) ⇒ R]): Rule[j.In, j.Out]
+ def apply[X, Y, Z, R](f: (X, Y, Z, A, B, C) ⇒ R)(implicit j: SJoin[X :: Y :: Z :: I, HNil, R], c: FCapture[(X, Y, Z, A, B, C) ⇒ R]): Rule[j.In, j.Out]
+ def apply[W, X, Y, Z, R](f: (W, X, Y, Z, A, B, C) ⇒ R)(implicit j: SJoin[W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(W, X, Y, Z, A, B, C) ⇒ R]): Rule[j.In, j.Out]
+ def apply[V, W, X, Y, Z, R](f: (V, W, X, Y, Z, A, B, C) ⇒ R)(implicit j: SJoin[V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(V, W, X, Y, Z, A, B, C) ⇒ R]): Rule[j.In, j.Out]
+ def apply[U, V, W, X, Y, Z, R](f: (U, V, W, X, Y, Z, A, B, C) ⇒ R)(implicit j: SJoin[U :: V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(U, V, W, X, Y, Z, A, B, C) ⇒ R]): Rule[j.In, j.Out]
+ }
+ implicit def ops4[I <: HList, A, B, C, D]: ActionOps[I, A :: B :: C :: D :: HNil] { type Out = Ops4[I, A, B, C, D] } = `n/a`
+ sealed trait Ops4[I <: HList, A, B, C, D] {
+ def apply[R](f: () ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: HNil, R], c: FCapture[() ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: D ⇒ R)(implicit j: SJoin[I, A :: B :: C :: HNil, R], c: FCapture[D ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (C, D) ⇒ R)(implicit j: SJoin[I, A :: B :: HNil, R], c: FCapture[(C, D) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (B, C, D) ⇒ R)(implicit j: SJoin[I, A :: HNil, R], c: FCapture[(B, C, D) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (A, B, C, D) ⇒ R)(implicit j: SJoin[I, HNil, R], c: FCapture[(A, B, C, D) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Z, R](f: (Z, A, B, C, D) ⇒ R)(implicit j: SJoin[Z :: I, HNil, R], c: FCapture[(Z, A, B, C, D) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Y, Z, R](f: (Y, Z, A, B, C, D) ⇒ R)(implicit j: SJoin[Y :: Z :: I, HNil, R], c: FCapture[(Y, Z, A, B, C, D) ⇒ R]): Rule[j.In, j.Out]
+ def apply[X, Y, Z, R](f: (X, Y, Z, A, B, C, D) ⇒ R)(implicit j: SJoin[X :: Y :: Z :: I, HNil, R], c: FCapture[(X, Y, Z, A, B, C, D) ⇒ R]): Rule[j.In, j.Out]
+ def apply[W, X, Y, Z, R](f: (W, X, Y, Z, A, B, C, D) ⇒ R)(implicit j: SJoin[W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(W, X, Y, Z, A, B, C, D) ⇒ R]): Rule[j.In, j.Out]
+ def apply[V, W, X, Y, Z, R](f: (V, W, X, Y, Z, A, B, C, D) ⇒ R)(implicit j: SJoin[V :: W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(V, W, X, Y, Z, A, B, C, D) ⇒ R]): Rule[j.In, j.Out]
+ }
+ implicit def ops5[I <: HList, A, B, C, D, E]: ActionOps[I, A :: B :: C :: D :: E :: HNil] { type Out = Ops5[I, A, B, C, D, E] } = `n/a`
+ sealed trait Ops5[I <: HList, A, B, C, D, E] {
+ def apply[R](f: () ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: E :: HNil, R], c: FCapture[() ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: E ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: HNil, R], c: FCapture[E ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (D, E) ⇒ R)(implicit j: SJoin[I, A :: B :: C :: HNil, R], c: FCapture[(D, E) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (C, D, E) ⇒ R)(implicit j: SJoin[I, A :: B :: HNil, R], c: FCapture[(C, D, E) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (B, C, D, E) ⇒ R)(implicit j: SJoin[I, A :: HNil, R], c: FCapture[(B, C, D, E) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (A, B, C, D, E) ⇒ R)(implicit j: SJoin[I, HNil, R], c: FCapture[(A, B, C, D, E) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Z, R](f: (Z, A, B, C, D, E) ⇒ R)(implicit j: SJoin[Z :: I, HNil, R], c: FCapture[(Z, A, B, C, D, E) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Y, Z, R](f: (Y, Z, A, B, C, D, E) ⇒ R)(implicit j: SJoin[Y :: Z :: I, HNil, R], c: FCapture[(Y, Z, A, B, C, D, E) ⇒ R]): Rule[j.In, j.Out]
+ def apply[X, Y, Z, R](f: (X, Y, Z, A, B, C, D, E) ⇒ R)(implicit j: SJoin[X :: Y :: Z :: I, HNil, R], c: FCapture[(X, Y, Z, A, B, C, D, E) ⇒ R]): Rule[j.In, j.Out]
+ def apply[W, X, Y, Z, R](f: (W, X, Y, Z, A, B, C, D, E) ⇒ R)(implicit j: SJoin[W :: X :: Y :: Z :: I, HNil, R], c: FCapture[(W, X, Y, Z, A, B, C, D, E) ⇒ R]): Rule[j.In, j.Out]
+ }
+ implicit def ops6[I <: HList, A, B, C, D, E, F]: ActionOps[I, A :: B :: C :: D :: E :: F :: HNil] { type Out = Ops6[I, A, B, C, D, E, F] } = `n/a`
+ sealed trait Ops6[I <: HList, A, B, C, D, E, F] {
+ def apply[R](f: () ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: E :: F :: HNil, R], c: FCapture[() ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: F ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: E :: HNil, R], c: FCapture[F ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (E, F) ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: HNil, R], c: FCapture[(E, F) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (D, E, F) ⇒ R)(implicit j: SJoin[I, A :: B :: C :: HNil, R], c: FCapture[(D, E, F) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (C, D, E, F) ⇒ R)(implicit j: SJoin[I, A :: B :: HNil, R], c: FCapture[(C, D, E, F) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (B, C, D, E, F) ⇒ R)(implicit j: SJoin[I, A :: HNil, R], c: FCapture[(B, C, D, E, F) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (A, B, C, D, E, F) ⇒ R)(implicit j: SJoin[I, HNil, R], c: FCapture[(A, B, C, D, E, F) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Z, R](f: (Z, A, B, C, D, E, F) ⇒ R)(implicit j: SJoin[Z :: I, HNil, R], c: FCapture[(Z, A, B, C, D, E, F) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Y, Z, R](f: (Y, Z, A, B, C, D, E, F) ⇒ R)(implicit j: SJoin[Y :: Z :: I, HNil, R], c: FCapture[(Y, Z, A, B, C, D, E, F) ⇒ R]): Rule[j.In, j.Out]
+ def apply[X, Y, Z, R](f: (X, Y, Z, A, B, C, D, E, F) ⇒ R)(implicit j: SJoin[X :: Y :: Z :: I, HNil, R], c: FCapture[(X, Y, Z, A, B, C, D, E, F) ⇒ R]): Rule[j.In, j.Out]
+ }
+ implicit def ops7[I <: HList, A, B, C, D, E, F, G]: ActionOps[I, A :: B :: C :: D :: E :: F :: G :: HNil] { type Out = Ops7[I, A, B, C, D, E, F, G] } = `n/a`
+ sealed trait Ops7[I <: HList, A, B, C, D, E, F, G] {
+ def apply[R](f: () ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: E :: F :: G :: HNil, R], c: FCapture[() ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: G ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: E :: F :: HNil, R], c: FCapture[G ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (F, G) ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: E :: HNil, R], c: FCapture[(F, G) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (E, F, G) ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: HNil, R], c: FCapture[(E, F, G) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (D, E, F, G) ⇒ R)(implicit j: SJoin[I, A :: B :: C :: HNil, R], c: FCapture[(D, E, F, G) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (C, D, E, F, G) ⇒ R)(implicit j: SJoin[I, A :: B :: HNil, R], c: FCapture[(C, D, E, F, G) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (B, C, D, E, F, G) ⇒ R)(implicit j: SJoin[I, A :: HNil, R], c: FCapture[(B, C, D, E, F, G) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (A, B, C, D, E, F, G) ⇒ R)(implicit j: SJoin[I, HNil, R], c: FCapture[(A, B, C, D, E, F, G) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Z, R](f: (Z, A, B, C, D, E, F, G) ⇒ R)(implicit j: SJoin[Z :: I, HNil, R], c: FCapture[(Z, A, B, C, D, E, F, G) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Y, Z, R](f: (Y, Z, A, B, C, D, E, F, G) ⇒ R)(implicit j: SJoin[Y :: Z :: I, HNil, R], c: FCapture[(Y, Z, A, B, C, D, E, F, G) ⇒ R]): Rule[j.In, j.Out]
+ }
+ implicit def ops8[I <: HList, A, B, C, D, E, F, G, H]: ActionOps[I, A :: B :: C :: D :: E :: F :: G :: H :: HNil] { type Out = Ops8[I, A, B, C, D, E, F, G, H] } = `n/a`
+ sealed trait Ops8[I <: HList, A, B, C, D, E, F, G, H] {
+ def apply[R](f: () ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: E :: F :: G :: H :: HNil, R], c: FCapture[() ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: H ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: E :: F :: G :: HNil, R], c: FCapture[H ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (G, H) ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: E :: F :: HNil, R], c: FCapture[(G, H) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (F, G, H) ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: E :: HNil, R], c: FCapture[(F, G, H) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (E, F, G, H) ⇒ R)(implicit j: SJoin[I, A :: B :: C :: D :: HNil, R], c: FCapture[(E, F, G, H) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (D, E, F, G, H) ⇒ R)(implicit j: SJoin[I, A :: B :: C :: HNil, R], c: FCapture[(D, E, F, G, H) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (C, D, E, F, G, H) ⇒ R)(implicit j: SJoin[I, A :: B :: HNil, R], c: FCapture[(C, D, E, F, G, H) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (B, C, D, E, F, G, H) ⇒ R)(implicit j: SJoin[I, A :: HNil, R], c: FCapture[(B, C, D, E, F, G, H) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (A, B, C, D, E, F, G, H) ⇒ R)(implicit j: SJoin[I, HNil, R], c: FCapture[(A, B, C, D, E, F, G, H) ⇒ R]): Rule[j.In, j.Out]
+ def apply[Z, R](f: (Z, A, B, C, D, E, F, G, H) ⇒ R)(implicit j: SJoin[Z :: I, HNil, R], c: FCapture[(Z, A, B, C, D, E, F, G, H) ⇒ R]): Rule[j.In, j.Out]
+ }
+ implicit def ops[I <: HList, O <: HList, OI <: HList, A, B, C, D, E, F, G, H, J]
+ (implicit x: TakeRight9[O, OI, A, B, C, D, E, F, G, H, J]): ActionOps[I, O] { type Out = Ops[I, OI, A, B, C, D, E, F, G, H, J] } = `n/a`
+ sealed trait Ops[I <: HList, OI <: HList, A, B, C, D, E, F, G, H, J] {
+ def apply[R](f: () ⇒ R)(implicit j: Join[I, OI, A :: B :: C :: D :: E :: F :: G :: H :: J :: HNil, R], c: FCapture[() ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: J ⇒ R)(implicit j: Join[I, OI, A :: B :: C :: D :: E :: F :: G :: H :: HNil, R], c: FCapture[J ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (H, J) ⇒ R)(implicit j: Join[I, OI, A :: B :: C :: D :: E :: F :: G :: HNil, R], c: FCapture[(H, J) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (G, H, J) ⇒ R)(implicit j: Join[I, OI, A :: B :: C :: D :: E :: F :: HNil, R], c: FCapture[(G, H, J) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (F, G, H, J) ⇒ R)(implicit j: Join[I, OI, A :: B :: C :: D :: E :: HNil, R], c: FCapture[(F, G, H, J) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (E, F, G, H, J) ⇒ R)(implicit j: Join[I, OI, A :: B :: C :: D :: HNil, R], c: FCapture[(E, F, G, H, J) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (D, E, F, G, H, J) ⇒ R)(implicit j: Join[I, OI, A :: B :: C :: HNil, R], c: FCapture[(D, E, F, G, H, J) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (C, D, E, F, G, H, J) ⇒ R)(implicit j: Join[I, OI, A :: B :: HNil, R], c: FCapture[(C, D, E, F, G, H, J) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (B, C, D, E, F, G, H, J) ⇒ R)(implicit j: Join[I, OI, A :: HNil, R], c: FCapture[(B, C, D, E, F, G, H, J) ⇒ R]): Rule[j.In, j.Out]
+ def apply[R](f: (A, B, C, D, E, F, G, H, J) ⇒ R)(implicit j: Join[I, OI, HNil, R], c: FCapture[(A, B, C, D, E, F, G, H, J) ⇒ R]): Rule[j.In, j.Out]
+ }
+}
+
+// we want to support the "short case class notation" `... ~> Foo`
+// unfortunately the Tree for the function argument to the `apply` overloads above does *not* allow us to inspect the
+// function type, which is why we capture it separately with this helper type
+sealed trait FCapture[T]
+object FCapture {
+ implicit def apply[T]: FCapture[T] = `n/a`
+}
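+
+// Illustrative sketch (added, not part of the original source; `Person`, `Name` and `Age` are hypothetical):
+// with FCapture in place the `~>` overloads can accept a case-class companion directly, because the
+// captured function type tells the macro how many values to pop from the value stack:
+//   case class Person(name: String, age: String)
+//   def person = rule { capture(Name) ~ capture(Age) ~> Person }   // short case class notation
+//   // equivalent to: ... ~> ((name: String, age: String) ⇒ Person(name, age))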
+
+// builds `In` and `Out` types according to this logic:
+// if (R == Unit)
+// In = I, Out = L1 ::: L2
+// else if (R <: HList)
+// In = I, Out = L1 ::: L2 ::: R
+// else if (R <: Rule[I2, O2])
+// In = TailSwitch[I2, L1 ::: L2, I], Out = TailSwitch[L1 ::: L2, I2, O2]
+// else
+// In = I, Out = L1 ::: L2 ::: R :: HNil
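+//
+// worked example (added for illustration, not part of the original source):
+//   I = Int :: HNil, L1 = HNil, L2 = String :: HNil, R = Double
+//   ⇒ R is neither Unit, an HList nor a Rule, so In = Int :: HNil and Out = String :: Double :: HNil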
+sealed trait Join[I <: HList, L1 <: HList, L2 <: HList, R] {
+ type In <: HList
+ type Out <: HList
+}
+object Join {
+ implicit def join[I <: HList, L1 <: HList, L2 <: HList, R, In0 <: HList, Out0 <: HList]
+ (implicit x: Aux[I, L1, L2, R, HNil, In0, Out0]): Join[I, L1, L2, R] { type In = In0; type Out = Out0 } = `n/a`
+
+ sealed trait Aux[I <: HList, L1 <: HList, L2 <: HList, R, Acc <: HList, In <: HList, Out <: HList]
+ object Aux extends Aux1 {
+ // if R == Unit convert to HNil
+ implicit def forUnit[I <: HList, L1 <: HList, L2 <: HList, Acc <: HList, Out <: HList]
+ (implicit x: Aux[I, L1, L2, HNil, Acc, I, Out]): Aux[I, L1, L2, Unit, Acc, I, Out] = `n/a`
+
+ // if R <: HList and L1 non-empty move head of L1 to Acc
+ implicit def iter1[I <: HList, H, T <: HList, L2 <: HList, R <: HList, Acc <: HList, Out <: HList]
+ (implicit x: Aux[I, T, L2, R, H :: Acc, I, Out]): Aux[I, H :: T, L2, R, Acc, I, Out] = `n/a`
+
+ // if R <: HList and L1 empty and L2 non-empty move head of L2 to Acc
+ implicit def iter2[I <: HList, H, T <: HList, R <: HList, Acc <: HList, Out <: HList]
+ (implicit x: Aux[I, HNil, T, R, H :: Acc, I, Out]): Aux[I, HNil, H :: T, R, Acc, I, Out] = `n/a`
+
+ // if R <: HList and L1 and L2 empty set Out = reversePrepend Acc before R
+ implicit def terminate[I <: HList, R <: HList, Acc <: HList, Out <: HList]
+ (implicit x: ReversePrepend.Aux[Acc, R, Out]): Aux[I, HNil, HNil, R, Acc, I, Out] = `n/a`
+
+ // if R <: Rule and L1 non-empty move head of L1 to Acc
+ implicit def iterRule1[I <: HList, L2 <: HList, I2 <: HList, O2 <: HList, In0 <: HList, Acc <: HList, Out0 <: HList, H, T <: HList]
+ (implicit x: Aux[I, T, L2, Rule[I2, O2], H :: Acc, In0, Out0]): Aux[I, H :: T, L2, Rule[I2, O2], HNil, In0, Out0] = `n/a`
+
+ // if R <: Rule and L1 empty and Acc non-empty move head of Acc to L2
+ implicit def iterRule2[I <: HList, L2 <: HList, I2 <: HList, O2 <: HList, In0 <: HList, Out0 <: HList, H, T <: HList]
+ (implicit x: Aux[I, HNil, H :: L2, Rule[I2, O2], T, In0, Out0]): Aux[I, HNil, L2, Rule[I2, O2], H :: T, In0, Out0] = `n/a`
+
+ // if R <: Rule and L1 and Acc empty set In and Out to the TailSwitch results
+ implicit def terminateRule[I <: HList, O <: HList, I2 <: HList, O2 <: HList, In <: HList, Out <: HList]
+ (implicit i: TailSwitch.Aux[I2, I2, O, O, I, HNil, In], o: TailSwitch.Aux[O, O, I2, I2, O2, HNil, Out]): Aux[I, HNil, O, Rule[I2, O2], HNil, In, Out] = `n/a`
+ }
+ abstract class Aux1 {
+ // convert R to R :: HNil
+ implicit def forAny[I <: HList, L1 <: HList, L2 <: HList, R, Acc <: HList, Out <: HList](implicit x: Aux[I, L1, L2, R :: HNil, Acc, I, Out]): Aux[I, L1, L2, R, Acc, I, Out] = `n/a`
+ }
+}
+
+
+sealed trait TakeRight9[L <: HList, Init <: HList, A, B, C, D, E, F, G, H, I]
+object TakeRight9 extends LowerPriorityMatchRight9 {
+ implicit def forHList9[A, B, C, D, E, F, G, H, I]: TakeRight9[A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil, HNil, A, B, C, D, E, F, G, H, I] = `n/a`
+}
+private[parboiled2] abstract class LowerPriorityMatchRight9 {
+ implicit def forHList[Head, Tail <: HList, Init <: HList, A, B, C, D, E, F, G, H, I]
+ (implicit x: TakeRight9[Tail, Init, A, B, C, D, E, F, G, H, I]): TakeRight9[Head :: Tail, Head :: Init, A, B, C, D, E, F, G, H, I] = `n/a`
+}
\ No newline at end of file
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/HListable.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/HListable.scala
new file mode 100644
index 0000000000..e528549a63
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/support/HListable.scala
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2.support
+
+import akka.shapeless._
+
+trait HListable[T] {
+ type Out <: HList
+}
+
+object HListable extends LowerPriorityHListable {
+ implicit def fromUnit: HListable[Unit] { type Out = HNil } = `n/a`
+ implicit def fromHList[T <: HList]: HListable[T] { type Out = T } = `n/a`
+}
+
+abstract class LowerPriorityHListable {
+ implicit def fromAnyRef[T]: HListable[T] { type Out = T :: HNil } = `n/a`
+}
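+
+// Illustration (added, not part of the original source) of the resulting Out types:
+//   HListable[Unit]#Out          = HNil
+//   HListable[Int :: HNil]#Out   = Int :: HNil    (already an HList)
+//   HListable[String]#Out        = String :: HNil (via the lower-priority fromAnyRef)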
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/Lifter.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/Lifter.scala
new file mode 100644
index 0000000000..4d95d88a08
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/support/Lifter.scala
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2.support
+
+import scala.annotation.implicitNotFound
+import akka.shapeless._
+
+@implicitNotFound("The `optional`, `zeroOrMore`, `oneOrMore` and `times` modifiers " +
+ "can only be used on rules of type `Rule0`, `Rule1[T]` and `Rule[I, O <: I]`!")
+sealed trait Lifter[M[_], I <: HList, O <: HList] { type In <: HList; type Out <: HList }
+
+object Lifter extends LowerPriorityLifter {
+ implicit def forRule0[M[_]]: Lifter[M, HNil, HNil] { type In = HNil; type Out = HNil } = `n/a`
+ implicit def forRule1[M[_], T]: Lifter[M, HNil, T :: HNil] { type In = HNil; type Out = M[T] :: HNil } = `n/a`
+}
+
+sealed abstract class LowerPriorityLifter {
+ implicit def forReduction[M[_], L <: HList, R <: L]: Lifter[M, L, R] { type In = L; type Out = R } = `n/a`
+}
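+
+// Illustration (added, not part of the original source):
+//   optional on a Rule1[T] resolves Lifter[Option, HNil, T :: HNil], so the lifted rule pushes Option[T];
+//   zeroOrMore/oneOrMore resolve the same shape with a collection type for M;
+//   on a reduction rule Rule[L, R <: L] the forReduction instance keeps the stack shape unchanged.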
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/OpTreeContext.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/OpTreeContext.scala
new file mode 100644
index 0000000000..ae71badc3b
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/support/OpTreeContext.scala
@@ -0,0 +1,653 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2.support
+
+import scala.annotation.tailrec
+import akka.parboiled2._
+
+trait OpTreeContext[OpTreeCtx <: ParserMacros.ParserContext] {
+ val c: OpTreeCtx
+ import c.universe._
+
+ sealed abstract class OpTree {
+ def ruleFrame: Tree
+
+ // renders a RuleX Tree
+ def renderRule(ruleName: String): Tree = q"""
+ // split out into a separate method so as not to double the rule method size,
+ // which would effectively decrease method inlining by about 50%
+ def wrapped: Boolean = ${render(wrapped = true, ruleName)}
+ val matched =
+ if (__collectingErrors) wrapped
+ else ${render(wrapped = false)}
+ if (matched) akka.parboiled2.Rule else null""" // we encode the "matched" boolean as 'ruleResult ne null'
+
+ // renders a Boolean Tree
+ def render(wrapped: Boolean, ruleName: String = ""): Tree =
+ if (wrapped) q"""
+ try ${renderInner(wrapped)}
+ catch {
+ case e: akka.parboiled2.Parser.CollectingRuleStackException ⇒
+ e.save(akka.parboiled2.RuleFrame($ruleFrame, $ruleName))
+ }"""
+ else renderInner(wrapped)
+
+ // renders a Boolean Tree
+ protected def renderInner(wrapped: Boolean): Tree
+ }
+
+ def collector(lifterTree: Tree): Collector =
+ lifterTree match {
+ case q"support.this.$a.forRule0[$b]" ⇒ rule0Collector
+ case q"support.this.$a.forRule1[$b, $c]" ⇒ rule1Collector
+ case q"support.this.$a.forReduction[$b, $c, $d]" ⇒ rule0Collector
+ case x ⇒ c.abort(x.pos, "Unexpected Lifter: " + lifterTree)
+ }
+
+ val opTreePF: PartialFunction[Tree, OpTree] = {
+ case q"$lhs.~[$a, $b]($rhs)($c, $d)" ⇒ Sequence(OpTree(lhs), OpTree(rhs))
+ case q"$lhs.|[$a, $b]($rhs)" ⇒ FirstOf(OpTree(lhs), OpTree(rhs))
+ case q"$a.this.ch($c)" ⇒ CharMatch(c)
+ case q"$a.this.str($s)" ⇒ StringMatch(s)
+ case q"$a.this.valueMap[$b]($m)($hl)" ⇒ MapMatch(m)
+ case q"$a.this.ignoreCase($t)" ⇒ IgnoreCase(t)
+ case q"$a.this.predicate($p)" ⇒ CharPredicateMatch(p)
+ case q"$a.this.anyOf($s)" ⇒ AnyOf(s)
+ case q"$a.this.noneOf($s)" ⇒ NoneOf(s)
+ case q"$a.this.ANY" ⇒ ANY
+ case q"$a.this.optional[$b, $c]($arg)($o)" ⇒ Optional(OpTree(arg), collector(o))
+ case q"$a.this.zeroOrMore[$b, $c]($arg)($s)" ⇒ ZeroOrMore(OpTree(arg), collector(s))
+ case q"$a.this.oneOrMore[$b, $c]($arg)($s)" ⇒ OneOrMore(OpTree(arg), collector(s))
+ case q"$base.times[$a, $b]($r)($s)" ⇒ Times(base, OpTree(r), collector(s))
+ case q"$a.this.&($arg)" ⇒ AndPredicate(OpTree(arg))
+ case q"$a.unary_!()" ⇒ NotPredicate(OpTree(a))
+ case q"$a.this.test($flag)" ⇒ SemanticPredicate(flag)
+ case q"$a.this.capture[$b, $c]($arg)($d)" ⇒ Capture(OpTree(arg))
+ case q"$a.this.run[$b]($arg)($c.fromAux[$d, $e]($rr))" ⇒ RunAction(arg, rr)
+ case q"$a.this.push[$b]($arg)($hl)" ⇒ PushAction(arg, hl)
+ case q"$a.this.drop[$b]($hl)" ⇒ DropAction(hl)
+ case q"$a.this.runSubParser[$b, $c]($f)" ⇒ RunSubParser(f)
+ case x @ q"$a.this.str2CharRangeSupport($l).-($r)" ⇒ CharRange(l, r)
+ case q"$a.this.charAndValue[$t]($b.any2ArrowAssoc[$t1]($c).->[$t2]($v))($hl)" ⇒
+ Sequence(CharMatch(c), PushAction(v, hl))
+ case q"$a.this.stringAndValue[$t]($b.any2ArrowAssoc[$t1]($s).->[$t2]($v))($hl)" ⇒
+ Sequence(StringMatch(s), PushAction(v, hl))
+ case q"$a.this.rule2ActionOperator[$b1, $b2]($r)($o).~>.apply[..$e]($f)($g, support.this.FCapture.apply[$ts])" ⇒
+ Sequence(OpTree(r), Action(f, ts))
+ case x @ q"$a.this.rule2WithSeparatedBy[$b1, $b2]($base.$fun[$d, $e]($arg)($s)).separatedBy($sep)" ⇒
+ val (op, coll, separator) = (OpTree(arg), collector(s), Separator(OpTree(sep)))
+ fun.decodedName.toString match {
+ case "zeroOrMore" ⇒ ZeroOrMore(op, coll, separator)
+ case "oneOrMore" ⇒ OneOrMore(op, coll, separator)
+ case "times" ⇒ Times(base, op, coll, separator)
+ case _ ⇒ c.abort(x.pos, "Unexpected Repeated fun: " + fun)
+ }
+ case call @ (Apply(_, _) | Select(_, _) | Ident(_)) ⇒ RuleCall(call)
+ }
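+
+ // Illustration (added, not part of the original source): a DSL expression like
+ //   rule { str("foo") ~ ch('x') | ANY }
+ // is recognised by opTreePF (after Sequence/FirstOf flattening) roughly as
+ //   FirstOf(Seq(Sequence(Seq(StringMatch("foo"), CharMatch('x'))), ANY))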
+
+ def OpTree(tree: Tree): OpTree =
+ opTreePF.applyOrElse(tree, (t: Tree) ⇒ c.abort(t.pos, "Invalid rule definition: " + t))
+
+ def Sequence(lhs: OpTree, rhs: OpTree): Sequence =
+ lhs -> rhs match {
+ case (Sequence(lops), Sequence(rops)) ⇒ Sequence(lops ++ rops)
+ case (Sequence(lops), _) ⇒ Sequence(lops :+ rhs)
+ case (_, Sequence(ops)) ⇒ Sequence(lhs +: ops)
+ case _ ⇒ Sequence(Seq(lhs, rhs))
+ }
+
+ case class Sequence(ops: Seq[OpTree]) extends OpTree {
+ require(ops.size >= 2)
+ def ruleFrame = q"akka.parboiled2.RuleFrame.Sequence(${ops.size})"
+ def renderInner(wrapped: Boolean): Tree =
+ ops.map(_.render(wrapped)).reduceLeft((l, r) ⇒ q"$l && $r")
+ }
+
+ def FirstOf(lhs: OpTree, rhs: OpTree): FirstOf =
+ lhs -> rhs match {
+ case (FirstOf(lops), FirstOf(rops)) ⇒ FirstOf(lops ++ rops)
+ case (FirstOf(lops), _) ⇒ FirstOf(lops :+ rhs)
+ case (_, FirstOf(ops)) ⇒ FirstOf(lhs +: ops)
+ case _ ⇒ FirstOf(Seq(lhs, rhs))
+ }
+
+ case class FirstOf(ops: Seq[OpTree]) extends OpTree {
+ def ruleFrame = q"akka.parboiled2.RuleFrame.FirstOf(${ops.size})"
+ def renderInner(wrapped: Boolean): Tree =
+ q"""val mark = __saveState; ${
+ ops.map(_.render(wrapped)).reduceLeft((l, r) ⇒ q"$l || { __restoreState(mark); $r }")
+ }"""
+ }
+
+ case class CharMatch(charTree: Tree) extends OpTree {
+ def ruleFrame = q"akka.parboiled2.RuleFrame.CharMatch($charTree)"
+ def renderInner(wrapped: Boolean): Tree = {
+ val unwrappedTree = q"cursorChar == $charTree && __advance()"
+ if (wrapped) q"$unwrappedTree && __updateMaxCursor() || __registerMismatch()" else unwrappedTree
+ }
+ }
+
+ case class StringMatch(stringTree: Tree) extends OpTree {
+ final private val autoExpandMaxStringLength = 8
+ def renderInner(wrapped: Boolean): Tree = `n/a`
+ def ruleFrame = q"akka.parboiled2.RuleFrame.StringMatch($stringTree)"
+ override def render(wrapped: Boolean, ruleName: String = ""): Tree = {
+ def unrollUnwrapped(s: String, ix: Int = 0): Tree =
+ if (ix < s.length) q"""
+ if (cursorChar == ${s charAt ix}) {
+ __advance()
+ ${unrollUnwrapped(s, ix + 1)}:Boolean
+ } else false"""
+ else q"true"
+ def unrollWrapped(s: String, ix: Int = 0): Tree =
+ if (ix < s.length) {
+ val ch = s charAt ix
+ q"""
+ if (cursorChar == $ch) {
+ __advance()
+ __updateMaxCursor()
+ ${unrollWrapped(s, ix + 1)}
+ } else {
+ try __registerMismatch()
+ catch {
+ case e: akka.parboiled2.Parser.CollectingRuleStackException ⇒
+ e.save(akka.parboiled2.RuleFrame(akka.parboiled2.RuleFrame.StringMatch($s), $ruleName),
+ akka.parboiled2.RuleFrame.CharMatch($ch))
+ }
+ }"""
+ } else q"true"
+
+ stringTree match {
+ case Literal(Constant(s: String)) if s.length <= autoExpandMaxStringLength ⇒
+ if (s.isEmpty) q"true" else if (wrapped) unrollWrapped(s) else unrollUnwrapped(s)
+ case _ ⇒
+ if (wrapped) q"__matchStringWrapped($stringTree, $ruleName)"
+ else q"__matchString($stringTree)"
+ }
+ }
+ }
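+
+ // Illustration (added, not part of the original source): StringMatch("ab") is auto-expanded, so the
+ // unwrapped rendering is roughly
+ //   if (cursorChar == 'a') { __advance(); if (cursorChar == 'b') { __advance(); true } else false } else false
+ // while longer or non-literal strings fall back to __matchString / __matchStringWrapped.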
+
+ case class MapMatch(mapTree: Tree) extends OpTree {
+ def ruleFrame = q"akka.parboiled2.RuleFrame.MapMatch($mapTree)"
+ def renderInner(wrapped: Boolean): Tree = `n/a`
+ override def render(wrapped: Boolean, ruleName: String = ""): Tree =
+ if (wrapped) q"__matchMapWrapped($mapTree, $ruleName)"
+ else q"__matchMap($mapTree)"
+ }
+
+ def IgnoreCase(argTree: Tree): OpTree = {
+ val argTypeSymbol = argTree.tpe.typeSymbol
+ if (argTypeSymbol == definitions.CharClass) IgnoreCaseChar(argTree)
+ else if (argTypeSymbol == definitions.StringClass) IgnoreCaseString(argTree)
+ else c.abort(argTree.pos, "Unexpected `ignoreCase` argument type: " + argTypeSymbol)
+ }
+
+ case class IgnoreCaseChar(charTree: Tree) extends OpTree {
+ def ruleFrame = q"akka.parboiled2.RuleFrame.IgnoreCaseChar($charTree)"
+ def renderInner(wrapped: Boolean): Tree = {
+ val unwrappedTree = q"_root_.java.lang.Character.toLowerCase(cursorChar) == $charTree && __advance()"
+ if (wrapped) q"$unwrappedTree && __updateMaxCursor() || __registerMismatch()" else unwrappedTree
+ }
+ }
+
+ case class IgnoreCaseString(stringTree: Tree) extends OpTree {
+ final private val autoExpandMaxStringLength = 8
+ def renderInner(wrapped: Boolean): Tree = `n/a`
+ def ruleFrame = q"akka.parboiled2.RuleFrame.IgnoreCaseString($stringTree)"
+ override def render(wrapped: Boolean, ruleName: String = ""): Tree = {
+ def unrollUnwrapped(s: String, ix: Int = 0): Tree =
+ if (ix < s.length) q"""
+ if (_root_.java.lang.Character.toLowerCase(cursorChar) == ${s charAt ix}) {
+ __advance()
+ ${unrollUnwrapped(s, ix + 1)}
+ } else false"""
+ else q"true"
+ def unrollWrapped(s: String, ix: Int = 0): Tree =
+ if (ix < s.length) {
+ val ch = s charAt ix
+ q"""
+ if (_root_.java.lang.Character.toLowerCase(cursorChar) == $ch) {
+ __advance()
+ __updateMaxCursor()
+ ${unrollWrapped(s, ix + 1)}
+ } else {
+ try __registerMismatch()
+ catch {
+ case e: akka.parboiled2.Parser.CollectingRuleStackException ⇒
+ e.save(akka.parboiled2.RuleFrame(akka.parboiled2.RuleFrame.IgnoreCaseString($s), $ruleName),
+ akka.parboiled2.RuleFrame.IgnoreCaseChar($ch))
+ }
+ }"""
+ } else q"true"
+
+ stringTree match {
+ case Literal(Constant(s: String)) if s.length <= autoExpandMaxStringLength ⇒
+ if (s.isEmpty) q"true" else if (wrapped) unrollWrapped(s) else unrollUnwrapped(s)
+ case _ ⇒
+ if (wrapped) q"__matchIgnoreCaseStringWrapped($stringTree, $ruleName)"
+ else q"__matchIgnoreCaseString($stringTree)"
+ }
+ }
+ }
+
+ case class CharPredicateMatch(predicateTree: Tree) extends OpTree {
+ def predicateName = callName(predicateTree) getOrElse ""
+ def ruleFrame = q"akka.parboiled2.RuleFrame.CharPredicateMatch($predicateTree, $predicateName)"
+ def renderInner(wrapped: Boolean): Tree = {
+ val unwrappedTree = q"$predicateTree(cursorChar) && __advance()"
+ if (wrapped) q"$unwrappedTree && __updateMaxCursor() || __registerMismatch()" else unwrappedTree
+ }
+ }
+
+ case class AnyOf(stringTree: Tree) extends OpTree {
+ def ruleFrame = q"akka.parboiled2.RuleFrame.AnyOf($stringTree)"
+ def renderInner(wrapped: Boolean): Tree =
+ if (wrapped) q"__matchAnyOf($stringTree) && __updateMaxCursor() || __registerMismatch()"
+ else q"__matchAnyOf($stringTree)"
+ }
+
+ case class NoneOf(stringTree: Tree) extends OpTree {
+ def ruleFrame = q"akka.parboiled2.RuleFrame.NoneOf($stringTree)"
+ def renderInner(wrapped: Boolean): Tree =
+ if (wrapped) q"__matchNoneOf($stringTree) && __updateMaxCursor() || __registerMismatch()"
+ else q"__matchNoneOf($stringTree)"
+ }
+
+ case object ANY extends OpTree {
+ def ruleFrame = reify(RuleFrame.ANY).tree
+ def renderInner(wrapped: Boolean): Tree = {
+ val unwrappedTree = q"cursorChar != EOI && __advance()"
+ if (wrapped) q"$unwrappedTree && __updateMaxCursor() || __registerMismatch()" else unwrappedTree
+ }
+ }
+
+ case class Optional(op: OpTree, collector: Collector) extends OpTree {
+ def ruleFrame = reify(RuleFrame.Optional).tree
+ def renderInner(wrapped: Boolean): Tree = q"""
+ val mark = __saveState
+ if (${op.render(wrapped)}) {
+ ${collector.pushSomePop}
+ } else {
+ __restoreState(mark)
+ ${collector.pushNone}
+ }
+ true"""
+ }
+
+ case class ZeroOrMore(op: OpTree, collector: Collector, separator: Separator = null) extends OpTree {
+ def ruleFrame = reify(RuleFrame.ZeroOrMore).tree
+ def renderInner(wrapped: Boolean): Tree = {
+ val recurse =
+ if (separator eq null) q"rec(__saveState)"
+ else q"val m = __saveState; if (${separator(wrapped)}) rec(m) else m"
+
+ q"""
+ ${collector.valBuilder}
+
+ @_root_.scala.annotation.tailrec def rec(mark: akka.parboiled2.Parser.Mark): akka.parboiled2.Parser.Mark =
+ if (${op.render(wrapped)}) {
+ ${collector.popToBuilder}
+ $recurse
+ } else mark
+
+ __restoreState(rec(__saveState))
+ ${collector.pushBuilderResult}"""
+ }
+ }
+
+ case class OneOrMore(op: OpTree, collector: Collector, separator: Separator = null) extends OpTree {
+ def ruleFrame = reify(RuleFrame.OneOrMore).tree
+ def renderInner(wrapped: Boolean): Tree = {
+ val recurse =
+ if (separator eq null) q"rec(__saveState)"
+ else q"val m = __saveState; if (${separator(wrapped)}) rec(m) else m"
+
+ q"""
+ val firstMark = __saveState
+ ${collector.valBuilder}
+
+ @_root_.scala.annotation.tailrec def rec(mark: akka.parboiled2.Parser.Mark): akka.parboiled2.Parser.Mark =
+ if (${op.render(wrapped)}) {
+ ${collector.popToBuilder}
+ $recurse
+ } else mark
+
+ val mark = rec(firstMark)
+ mark != firstMark && {
+ __restoreState(mark)
+ ${collector.pushBuilderResult}
+ }"""
+ }
+ }
+
+ def Times(base: Tree, rule: OpTree, collector: Collector, separator: Separator = null): OpTree =
+ base match {
+ case q"$a.this.int2NTimes($n)" ⇒ n match {
+ case Literal(Constant(i: Int)) ⇒
+ if (i < 0) c.abort(base.pos, "`x` in `x.times` must be non-negative")
+ else if (i == 1) rule
+ else Times(rule, q"val min, max = $n", collector, separator)
+ case x @ (Ident(_) | Select(_, _)) ⇒ Times(rule, q"val min = $n; val max = min", collector, separator)
+ case _ ⇒ c.abort(n.pos, "Invalid int base expression for `.times(...)`: " + n)
+ }
+ case q"$a.this.range2NTimes($r)" ⇒ r match {
+ case q"scala.this.Predef.intWrapper($mn).to($mx)" ⇒ (mn, mx) match {
+ case (Literal(Constant(min: Int)), Literal(Constant(max: Int))) ⇒
+ if (min < 0) c.abort(mn.pos, "`min` in `(min to max).times` must be non-negative")
+ else if (max < 0) c.abort(mx.pos, "`max` in `(min to max).times` must be non-negative")
+ else if (max < min) c.abort(mx.pos, "`max` in `(min to max).times` must be >= `min`")
+ else Times(rule, q"val min = $mn; val max = $mx", collector, separator)
+ case ((Ident(_) | Select(_, _)), (Ident(_) | Select(_, _))) ⇒
+ Times(rule, q"val min = $mn; val max = $mx", collector, separator)
+ case _ ⇒ c.abort(r.pos, "Invalid int range expression for `.times(...)`: " + r)
+ }
+ case x @ (Ident(_) | Select(_, _)) ⇒
+ Times(rule, q"val r = $r; val min = r.start; val max = r.end", collector, separator)
+ case _ ⇒ c.abort(r.pos, "Invalid range base expression for `.times(...)`: " + r)
+ }
+ case _ ⇒ c.abort(base.pos, "Invalid base expression for `.times(...)`: " + base)
+ }
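+
+ // Illustration (added, not part of the original source; `Digit` is a hypothetical rule):
+ //   rule { 2.times(Digit) }        ⇒ min = max = 2
+ //   rule { (1 to 3).times(Digit) } ⇒ min = 1, max = 3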
+
+ case class Times(op: OpTree, init: Tree, collector: Collector, separator: Separator) extends OpTree {
+ val Block(inits, _) = init
+ def ruleFrame = q"..$inits; akka.parboiled2.RuleFrame.Times(min, max)"
+ def renderInner(wrapped: Boolean): Tree = {
+ val recurse =
+ if (separator eq null) q"rec(count + 1, __saveState)"
+ else q"""
+ val m = __saveState; if (${separator(wrapped)}) rec(count + 1, m)
+ else (count >= min) && { __restoreState(m); true }"""
+
+ q"""
+ ${collector.valBuilder}
+ ..$inits
+
+ @_root_.scala.annotation.tailrec def rec(count: Int, mark: akka.parboiled2.Parser.Mark): Boolean = {
+ if (${op.render(wrapped)}) {
+ ${collector.popToBuilder}
+ if (count < max) $recurse else true
+ } else (count > min) && { __restoreState(mark); true }
+ }
+
+ (max <= 0) || rec(1, __saveState) && ${collector.pushBuilderResult}"""
+ }
+ }
+
+ case class AndPredicate(op: OpTree) extends OpTree {
+ def ruleFrame = reify(RuleFrame.AndPredicate).tree
+ def renderInner(wrapped: Boolean): Tree = q"""
+ val mark = __saveState
+ val result = ${op.render(wrapped)}
+ __restoreState(mark)
+ result"""
+ }
+
+ case class NotPredicate(op: OpTree) extends OpTree {
+ def renderInner(wrapped: Boolean): Tree = `n/a`
+ def ruleFrame = reify(RuleFrame.NotPredicate).tree
+ override def render(wrapped: Boolean, ruleName: String = ""): Tree = {
+ val unwrappedTree = q"""
+ val mark = __saveState
+ val saved = __enterNotPredicate
+ val result = ${op.render(wrapped)}
+ __exitNotPredicate(saved)
+ __restoreState(mark)
+ !result"""
+ if (wrapped) q"""
+ try $unwrappedTree || __registerMismatch()
+ catch {
+ case e: akka.parboiled2.Parser.CollectingRuleStackException ⇒
+ e.save(akka.parboiled2.RuleFrame($ruleFrame, $ruleName), ${op.ruleFrame})
+ }"""
+ else unwrappedTree
+ }
+ }
+
+ case class SemanticPredicate(flagTree: Tree) extends OpTree {
+ def ruleFrame = reify(RuleFrame.SemanticPredicate).tree
+ def renderInner(wrapped: Boolean): Tree =
+ if (wrapped) flagTree else q"$flagTree || __registerMismatch()"
+ }
+
+ case class Capture(op: OpTree) extends OpTree {
+ def ruleFrame = reify(RuleFrame.Capture).tree
+ def renderInner(wrapped: Boolean): Tree = q"""
+ val start = cursor
+ ${op.render(wrapped)} && {valueStack.push(input.sliceString(start, cursor)); true}"""
+ }
+
+ case class RunAction(argTree: Tree, rrTree: Tree) extends OpTree {
+ def ruleFrame = reify(RuleFrame.Run).tree
+ def renderInner(wrapped: Boolean): Tree = {
+ def renderFunctionAction(resultTypeTree: Tree, argTypeTrees: Tree*): Tree = {
+ def actionBody(tree: Tree): Tree =
+ tree match {
+ case Block(statements, res) ⇒ block(statements, actionBody(res))
+
+ case q"(..$args ⇒ $body)" ⇒
+ def rewrite(tree: Tree): Tree =
+ tree match {
+ case Block(statements, res) ⇒ block(statements, rewrite(res))
+ case x if resultTypeTree.tpe <:< typeOf[Rule[_, _]] ⇒ expand(x, wrapped)
+ case x ⇒ q"__push($x)"
+ }
+ val valDefs = args.zip(argTypeTrees).map { case (a, t) ⇒ q"val ${a.name} = valueStack.pop().asInstanceOf[${t.tpe}]" }.reverse
+ block(valDefs, rewrite(body))
+
+ case x ⇒ c.abort(argTree.pos, "Unexpected `run` argument: " + show(argTree))
+ }
+
+ actionBody(c.resetLocalAttrs(argTree))
+ }
+
+ rrTree match {
+ case q"RunResult.this.Aux.forAny[$t]" ⇒ block(argTree, q"true")
+
+ case q"RunResult.this.Aux.forRule[$t]" ⇒ expand(argTree, wrapped)
+
+ case q"RunResult.this.Aux.forF1[$z, $r, $in, $out]($a)" ⇒ renderFunctionAction(r, z)
+ case q"RunResult.this.Aux.forF2[$y, $z, $r, $in, $out]($a)" ⇒ renderFunctionAction(r, y, z)
+ case q"RunResult.this.Aux.forF3[$x, $y, $z, $r, $in, $out]($a)" ⇒ renderFunctionAction(r, x, y, z)
+ case q"RunResult.this.Aux.forF4[$w, $x, $y, $z, $r, $in, $out]($a)" ⇒ renderFunctionAction(r, w, x, y, z)
+ case q"RunResult.this.Aux.forF5[$v, $w, $x, $y, $z, $r, $in, $out]($a)" ⇒ renderFunctionAction(r, v, w, x, y, z)
+
+ case q"RunResult.this.Aux.forFHList[$il, $r, $in, $out]($a)" ⇒
+ c.abort(argTree.pos, "`run` with a function taking an HList is not yet implemented") // TODO: implement
+
+ case x ⇒ c.abort(rrTree.pos, "Unexpected RunResult.Aux: " + show(x))
+ }
+ }
+ }
+
+ case class PushAction(argTree: Tree, hlTree: Tree) extends OpTree {
+ def ruleFrame = reify(RuleFrame.Push).tree
+ def renderInner(wrapped: Boolean): Tree =
+ block(hlTree match {
+ case q"support.this.HListable.fromUnit" ⇒ argTree
+ case q"support.this.HListable.fromHList[$t]" ⇒ q"valueStack.pushAll(${c.resetLocalAttrs(argTree)})"
+ case q"support.this.HListable.fromAnyRef[$t]" ⇒ q"valueStack.push(${c.resetLocalAttrs(argTree)})"
+ case x ⇒ c.abort(hlTree.pos, "Unexpected HListable: " + show(x))
+ }, q"true")
+ }
+
+ case class DropAction(hlTree: Tree) extends OpTree {
+ def ruleFrame = reify(RuleFrame.Drop).tree
+ def renderInner(wrapped: Boolean): Tree =
+ hlTree match {
+ case q"support.this.HListable.fromUnit" ⇒ q"true"
+ case q"support.this.HListable.fromAnyRef[$t]" ⇒ q"valueStack.pop(); true"
+ case q"support.this.HListable.fromHList[$t]" ⇒
+ @tailrec def rec(t: Type, result: List[Tree] = Nil): List[Tree] =
+ t match { // TODO: how can we use type quotes here, e.g. tq"shapeless.HNil"?
+ case TypeRef(_, sym, List(_, tail)) if sym == HListConsTypeSymbol ⇒ rec(tail, q"valueStack.pop()" :: result)
+ case TypeRef(_, sym, _) if sym == HNilTypeSymbol ⇒ result
+ }
+ Block(rec(t.tpe), q"true")
+ case x ⇒ c.abort(hlTree.pos, "Unexpected HListable: " + show(x))
+ }
+ }
+
+ case class RuleCall(call: Tree) extends OpTree {
+ def calleeName = callName(call) getOrElse c.abort(call.pos, "Illegal rule call: " + call)
+ def ruleFrame = q"akka.parboiled2.RuleFrame.RuleCall($calleeName)"
+ def renderInner(wrapped: Boolean): Tree = q"$call ne null"
+ }
+
+ def CharRange(lowerTree: Tree, upperTree: Tree): CharacterRange = {
+ val (lower, upper) = lowerTree -> upperTree match {
+ case (Literal(Constant(l: String)), Literal(Constant(u: String))) ⇒ l -> u
+ case _ ⇒ c.abort(lowerTree.pos, "Character ranges must be specified with string literals")
+ }
+ if (lower.length != 1) c.abort(lowerTree.pos, "lower bound must be a single char string")
+ if (upper.length != 1) c.abort(upperTree.pos, "upper bound must be a single char string")
+ val lowerBoundChar = lower.charAt(0)
+ val upperBoundChar = upper.charAt(0)
+ if (lowerBoundChar > upperBoundChar) c.abort(lowerTree.pos, "lower bound must not be > upper bound")
+ CharacterRange(lowerBoundChar, upperBoundChar)
+ }
+
+ case class CharacterRange(lowerBound: Char, upperBound: Char) extends OpTree {
+ def ruleFrame = q"akka.parboiled2.RuleFrame.CharRange($lowerBound, $upperBound)"
+ def renderInner(wrapped: Boolean): Tree = {
+ val unwrappedTree = q"""
+ val char = cursorChar
+ $lowerBound <= char && char <= $upperBound && __advance()"""
+ if (wrapped) q"$unwrappedTree && __updateMaxCursor() || __registerMismatch()" else unwrappedTree
+ }
+ }
+
+ case class Action(actionTree: Tree, actionTypeTree: Tree) extends OpTree {
+ val actionType: List[Type] = actionTypeTree.tpe match {
+ case TypeRef(_, _, args) if args.nonEmpty ⇒ args
+ case x ⇒ c.abort(actionTree.pos, "Unexpected action type: " + x)
+ }
+ def ruleFrame = reify(RuleFrame.Action).tree
+ def renderInner(wrapped: Boolean): Tree = {
+ val argTypes = actionType dropRight 1
+
+ def popToVals(valNames: List[TermName]): List[Tree] =
+ (valNames zip argTypes).map { case (n, t) ⇒ q"val $n = valueStack.pop().asInstanceOf[$t]" }.reverse
+
+ def actionBody(tree: Tree): Tree =
+ tree match {
+ case Block(statements, res) ⇒ block(statements, actionBody(res))
+
+ case x @ (Ident(_) | Select(_, _)) ⇒
+ val valNames: List[TermName] = argTypes.indices.map { i ⇒ newTermName("value" + i) }(collection.breakOut)
+ val args = valNames map Ident.apply
+ block(popToVals(valNames), q"__push($x(..$args))")
+
+ case q"(..$args ⇒ $body)" ⇒
+ def rewrite(tree: Tree): Tree =
+ tree match {
+ case Block(statements, res) ⇒ block(statements, rewrite(res))
+ case x if actionType.last <:< typeOf[Rule[_, _]] ⇒ expand(x, wrapped)
+ case x ⇒ q"__push($x)"
+ }
+ block(popToVals(args.map(_.name)), rewrite(body))
+ }
+
+ actionBody(c.resetLocalAttrs(actionTree))
+ }
+ }
+
+ case class RunSubParser(fTree: Tree) extends OpTree {
+ def ruleFrame = reify(RuleFrame.RunSubParser).tree
+ def renderInner(wrapped: Boolean): Tree = {
+ def rewrite(arg: TermName, tree: Tree): Tree =
+ tree match {
+ case Block(statements, res) ⇒ block(statements, rewrite(arg, res))
+ case q"$p.$rule" ⇒ q"""
+ val $arg = new __SubParserInput() // TODO: avoid re-allocation by re-using a cached instance
+ val __subParser = $p
+ val offset = cursor
+ __subParser.copyStateFrom(this, offset)
+ try __subParser.$rule ne null
+ finally this.copyStateFrom(__subParser, -offset)"""
+ case x ⇒ c.abort(x.pos, "Illegal runSubParser expr: " + show(x))
+ }
+
+ val q"($arg ⇒ $body)" = c.resetLocalAttrs(fTree)
+ rewrite(arg.name, body)
+ }
+ }
+
+ /////////////////////////////////// helpers ////////////////////////////////////
+
+ class Collector(
+ val valBuilder: Tree,
+ val popToBuilder: Tree,
+ val pushBuilderResult: Tree,
+ val pushSomePop: Tree,
+ val pushNone: Tree)
+
+ lazy val rule0Collector = {
+ val unit = q"()"
+ new Collector(unit, unit, q"true", unit, unit)
+ }
+
+ lazy val rule1Collector = new Collector(
+ valBuilder = q"val builder = new scala.collection.immutable.VectorBuilder[Any]",
+ popToBuilder = q"builder += valueStack.pop()",
+ pushBuilderResult = q"valueStack.push(builder.result()); true",
+ pushSomePop = q"valueStack.push(Some(valueStack.pop()))",
+ pushNone = q"valueStack.push(None)")
+
+ type Separator = Boolean ⇒ Tree
+
+ def Separator(op: OpTree): Separator = wrapped ⇒ op.render(wrapped)
+
+ lazy val HListConsTypeSymbol = typeOf[akka.shapeless.::[_, _]].typeSymbol
+ lazy val HNilTypeSymbol = typeOf[akka.shapeless.HNil].typeSymbol
+
+ // tries to match and expand the leaves of the given Tree
+ def expand(tree: Tree, wrapped: Boolean): Tree =
+ tree match {
+ case Block(statements, res) ⇒ block(statements, expand(res, wrapped))
+ case If(cond, thenExp, elseExp) ⇒ If(cond, expand(thenExp, wrapped), expand(elseExp, wrapped))
+ case Match(selector, cases) ⇒ Match(selector, cases.map(expand(_, wrapped).asInstanceOf[CaseDef]))
+ case CaseDef(pat, guard, body) ⇒ CaseDef(pat, guard, expand(body, wrapped))
+ case x ⇒ opTreePF.andThen(_.render(wrapped)).applyOrElse(tree, (t: Tree) ⇒ q"$t ne null")
+ }
+
+ @tailrec
+ private def callName(tree: Tree): Option[String] =
+ tree match {
+ case Ident(name) ⇒ Some(name.decodedName.toString)
+ case Select(_, name) ⇒ Some(name.decodedName.toString)
+ case Apply(fun, _) ⇒ callName(fun)
+ case _ ⇒ None
+ }
+
+ def block(a: Tree, b: Tree): Tree =
+ a match {
+ case Block(a1, a2) ⇒ b match {
+ case Block(b1, b2) ⇒ Block(a1 ::: a2 :: b1, b2)
+ case _ ⇒ Block(a1 ::: a2 :: Nil, b)
+ }
+ case _ ⇒ b match {
+ case Block(b1, b2) ⇒ Block(a :: b1, b2)
+ case _ ⇒ Block(a :: Nil, b)
+ }
+ }
+
+ def block(stmts: List[Tree], expr: Tree): Tree =
+ expr match {
+ case Block(a, b) ⇒ block(stmts ::: a ::: Nil, b)
+ case _ ⇒ Block(stmts, expr)
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/RunResult.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/RunResult.scala
new file mode 100644
index 0000000000..06e1f1b7be
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/support/RunResult.scala
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2.support
+
+import akka.shapeless._
+import akka.parboiled2._
+
+// phantom type, only used for rule DSL typing
+sealed trait RunResult[T] {
+ type Out <: RuleX
+}
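+
+// Illustration (added, not part of the original source):
+//   run(println("hi"))           ⇒ RunResult[Unit]#Out         = Rule0                            (via Aux.forAny)
+//   run((i: Int) ⇒ i.toString)   ⇒ RunResult[Int ⇒ String]#Out = Rule[Int :: HNil, String :: HNil] (via Aux.forF1)
+//   run(someRule)                ⇒ the rule's own type                                             (via Aux.forRule)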
+
+object RunResult {
+ implicit def fromAux[T, Out0 <: RuleX](implicit aux: Aux[T, Out0]): RunResult[T] { type Out = Out0 } = `n/a`
+
+ sealed trait Aux[T, Out]
+ object Aux extends Aux1 {
+ implicit def forRule[R <: RuleX]: Aux[R, R] = `n/a`
+ //implicit def forFHList[I <: HList, R, In0 <: HList, Out0 <: HList](implicit x: JA[I, R, In0, Out0]): Aux[I ⇒ R, Rule[In0, Out0]] = `n/a`
+ }
+ abstract class Aux1 extends Aux2 {
+ implicit def forF1[Z, R, In0 <: HList, Out0 <: HList](implicit x: JA[Z :: HNil, R, In0, Out0]): Aux[Z ⇒ R, Rule[In0, Out0]] = `n/a`
+ implicit def forF2[Y, Z, R, In0 <: HList, Out0 <: HList](implicit x: JA[Y :: Z :: HNil, R, In0, Out0]): Aux[(Y, Z) ⇒ R, Rule[In0, Out0]] = `n/a`
+ implicit def forF3[X, Y, Z, R, In0 <: HList, Out0 <: HList](implicit x: JA[X :: Y :: Z :: HNil, R, In0, Out0]): Aux[(X, Y, Z) ⇒ R, Rule[In0, Out0]] = `n/a`
+ implicit def forF4[W, X, Y, Z, R, In0 <: HList, Out0 <: HList](implicit x: JA[W :: X :: Y :: Z :: HNil, R, In0, Out0]): Aux[(W, X, Y, Z) ⇒ R, Rule[In0, Out0]] = `n/a`
+ implicit def forF5[V, W, X, Y, Z, R, In0 <: HList, Out0 <: HList](implicit x: JA[V :: W :: X :: Y :: Z :: HNil, R, In0, Out0]): Aux[(V, W, X, Y, Z) ⇒ R, Rule[In0, Out0]] = `n/a`
+ }
+
+ abstract class Aux2 {
+ protected type JA[I <: HList, R, In0 <: HList, Out0 <: HList] = Join.Aux[I, HNil, HNil, R, HNil, In0, Out0]
+ implicit def forAny[T]: Aux[T, Rule0] = `n/a`
+ }
+}
\ No newline at end of file
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/TailSwitch.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/TailSwitch.scala
new file mode 100644
index 0000000000..05fa9094c3
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/support/TailSwitch.scala
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2.support
+
+import scala.annotation.implicitNotFound
+import akka.shapeless._
+import akka.shapeless.ops.hlist.ReversePrepend
+
+// format: OFF
+
+/**
+ * type-level implementation of this logic:
+ * Out =
+ * R if T has a tail of type L
+ * (L dropRight T) ::: R if L has a tail of type T
+ */
+@implicitNotFound("Illegal rule composition")
+sealed trait TailSwitch[L <: HList, T <: HList, R <: HList] {
+ type Out <: HList
+}
+object TailSwitch {
+ implicit def tailSwitch[L <: HList, T <: HList, R <: HList, Out0 <: HList]
+ (implicit ts: Aux[L, L, T, T, R, HNil, Out0]): TailSwitch[L, T, R] { type Out = Out0 } = `n/a`
+
+ // type-level implementation of this algorithm:
+ // @tailrec def rec(L, LI, T, TI, R, RI) =
+ // if (TI <: L) R
+ // else if (LI <: T) RI.reverse ::: R
+ // else if (LI <: HNil) rec(L, HNil, T, TI.tail, R, RI)
+ // else if (TI <: HNil) rec(L, LI.tail, T, HNil, R, LI.head :: RI)
+ // else rec(L, LI.tail, T, TI.tail, R, LI.head :: RI)
+ // rec(L, L, T, T, R, HNil)
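+ //
+ // worked example (added for illustration, not part of the original source):
+ //   TailSwitch[A :: B :: HNil, B :: HNil, C :: HNil]  ⇒  Out = A :: C :: HNil
+ //   (B :: HNil is a tail of A :: B :: HNil, so Out = (L dropRight T) ::: R = (A :: HNil) ::: C :: HNil)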
+ sealed trait Aux[L <: HList, LI <: HList, T <: HList, TI <: HList, R <: HList, RI <: HList, Out <: HList]
+
+ object Aux extends Aux1 {
+ // if TI <: L then Out = R
+ implicit def terminate1[L <: HList, LI <: HList, T <: HList, TI <: L, R <: HList, RI <: HList]:
+ Aux[L, LI, T, TI, R, RI, R] = `n/a`
+ }
+
+ private[parboiled2] abstract class Aux1 extends Aux2 {
+ // if LI <: T then Out = RI.reverse ::: R
+ implicit def terminate2[T <: HList, TI <: HList, L <: HList, LI <: T, R <: HList, RI <: HList, Out <: HList]
+ (implicit rp: ReversePrepend.Aux[RI, R, Out]): Aux[L, LI, T, TI, R, RI, Out] = `n/a`
+ }
+
+ private[parboiled2] abstract class Aux2 {
+ implicit def iter1[L <: HList, T <: HList, TH, TT <: HList, R <: HList, RI <: HList, Out <: HList]
+ (implicit next: Aux[L, HNil, T, TT, R, RI, Out]): Aux[L, HNil, T, TH :: TT, R, RI, Out] = `n/a`
+
+ implicit def iter2[L <: HList, LH, LT <: HList, T <: HList, R <: HList, RI <: HList, Out <: HList]
+ (implicit next: Aux[L, LT, T, HNil, R, LH :: RI, Out]): Aux[L, LH :: LT, T, HNil, R, RI, Out] = `n/a`
+
+ implicit def iter3[L <: HList, LH, LT <: HList, T <: HList, TH, TT <: HList, R <: HList, RI <: HList, Out <: HList]
+ (implicit next: Aux[L, LT, T, TT, R, LH :: RI, Out]): Aux[L, LH :: LT, T, TH :: TT, R, RI, Out] = `n/a`
+ }
+}
\ No newline at end of file
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/Unpack.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/Unpack.scala
new file mode 100644
index 0000000000..a16697fe43
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/support/Unpack.scala
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2.support
+
+import akka.shapeless._
+
+/**
+ * "Unpacks" an HList if it has only zero or one element(s).
+ * Out =
+ * Unit if L == HNil
+ * T if L == T :: HNil
+ * L otherwise
+ *
+ * You can `import Unpack.dontUnpack` if you'd like to circumvent this unpacking logic.
+ */
+sealed trait Unpack[L <: HList] {
+ type Out
+ def apply(hlist: L): Out
+}
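+
+// Illustration (added, not part of the original source) of the resulting Out types:
+//   Unpack[HNil]#Out                  = Unit
+//   Unpack[Int :: HNil]#Out           = Int
+//   Unpack[Int :: String :: HNil]#Out = Int :: String :: HNil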
+
+object Unpack extends AlternativeUnpacks {
+
+ implicit def fromAux[L <: HList, Out0](implicit aux: Aux[L, Out0]) = new Unpack[L] {
+ type Out = Out0
+ def apply(hlist: L) = aux(hlist)
+ }
+
+ sealed trait Aux[L <: HList, Out0] {
+ def apply(hlist: L): Out0
+ }
+
+ implicit def hnil[L <: HNil]: Aux[L, Unit] = HNilUnpack.asInstanceOf[Aux[L, Unit]]
+ implicit object HNilUnpack extends Aux[HNil, Unit] {
+ def apply(hlist: HNil): Unit = ()
+ }
+
+ implicit def single[T]: Aux[T :: HNil, T] = SingleUnpack.asInstanceOf[Aux[T :: HNil, T]]
+ private object SingleUnpack extends Aux[Any :: HList, Any] {
+ def apply(hlist: Any :: HList): Any = hlist.head
+ }
+}
+
+sealed abstract class AlternativeUnpacks {
+ /**
+ * Import if you'd like to *always* deliver the valueStack as an `HList`
+ * at the end of the parsing run, even if it has only zero or one element(s).
+ */
+ implicit def dontUnpack[L <: HList]: Unpack.Aux[L, L] = DontUnpack.asInstanceOf[Unpack.Aux[L, L]]
+ private object DontUnpack extends Unpack.Aux[HList, HList] {
+ def apply(hlist: HList): HList = hlist
+ }
+}
+
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/package.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/package.scala
new file mode 100644
index 0000000000..836b5d1a7a
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/parboiled2/support/package.scala
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.parboiled2
+
+package object support {
+ private[parboiled2] def `n/a` = throw new IllegalStateException
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/conversions.scala b/akka-parsing/src/main/scala/akka/shapeless/conversions.scala
new file mode 100644
index 0000000000..89be8231ad
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/conversions.scala
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import ops.hlist.Tupler
+
+/**
+ * Higher ranked function which converts `HLists` to tuples.
+ */
+object tupled extends Poly1 {
+ implicit def caseHList[L <: HList](implicit tupler: Tupler[L]) = at[L](tupler(_))
+}
+
+/**
+ * Higher ranked function which converts products to `HLists`.
+ */
+object productElements extends Poly1 {
+ implicit def caseProduct[P](implicit gen: Generic[P]) = at[P](p ⇒ gen.to(p))
+}
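+
+// Illustration (added, not part of the original source; assumes the usual shapeless semantics):
+//   tupled(23 :: "foo" :: true :: HNil)  == (23, "foo", true)
+//   productElements((23, "foo", true))   == 23 :: "foo" :: true :: HNil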
diff --git a/akka-parsing/src/main/scala/akka/shapeless/coproduct.scala b/akka-parsing/src/main/scala/akka/shapeless/coproduct.scala
new file mode 100644
index 0000000000..b1b9b26293
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/coproduct.scala
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2013-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+sealed trait Coproduct
+
+sealed trait :+:[+H, +T <: Coproduct] extends Coproduct
+
+final case class Inl[+H, +T <: Coproduct](head: H) extends :+:[H, T] {
+ override def toString = head.toString
+}
+
+final case class Inr[+H, +T <: Coproduct](tail: T) extends :+:[H, T] {
+ override def toString = tail.toString
+}
+
+sealed trait CNil extends Coproduct
+
+object Coproduct {
+ import ops.coproduct.Inject
+ import syntax.CoproductOps
+
+ class MkCoproduct[C <: Coproduct] {
+ def apply[T](t: T)(implicit inj: Inject[C, T]): C = inj(t)
+ }
+
+ def apply[C <: Coproduct] = new MkCoproduct[C]
+
+ implicit def cpOps[C <: Coproduct](c: C) = new CoproductOps(c)
+}
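+
+// Illustration (added, not part of the original source):
+//   type ISB = Int :+: String :+: Boolean :+: CNil
+//   Coproduct[ISB]("hello")   // == Inr(Inl("hello")): ISB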
+
+object union {
+ import ops.union.{ Keys, Values }
+ import syntax.UnionOps
+
+ implicit def unionOps[C <: Coproduct](u: C): UnionOps[C] = new UnionOps(u)
+
+ trait UnionType {
+ type Union <: Coproduct
+ type Keys <: HList
+ type Values <: Coproduct
+ }
+
+ object UnionType {
+ type Aux[U, K, V] = UnionType { type Union = U; type Keys = K; type Values = V }
+
+ def apply[U <: Coproduct](implicit keys: Keys[U], values: Values[U]): Aux[U, keys.Out, values.Out] =
+ new UnionType {
+ type Union = U
+ type Keys = keys.Out
+ type Values = values.Out
+ }
+
+ def like[U <: Coproduct](u: U)(implicit keys: Keys[U], values: Values[U]): Aux[U, keys.Out, values.Out] =
+ new UnionType {
+ type Union = U
+ type Keys = keys.Out
+ type Values = values.Out
+ }
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/caseinst.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/caseinst.scala
new file mode 100644
index 0000000000..344f4f2780
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/caseinst.scala
@@ -0,0 +1,89 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+trait CaseInst {
+ import poly._
+
+ implicit def inst1[Fn <: Poly, A, Res](cse: Case[Fn, A :: HNil] { type Result = Res }): (A) ⇒ Res =
+ (a: A) ⇒ cse.value(a :: HNil)
+
+ implicit def inst2[Fn <: Poly, A, B, Res](cse: Case[Fn, A :: B :: HNil] { type Result = Res }): (A, B) ⇒ Res =
+ (a: A, b: B) ⇒ cse.value(a :: b :: HNil)
+
+ implicit def inst3[Fn <: Poly, A, B, C, Res](cse: Case[Fn, A :: B :: C :: HNil] { type Result = Res }): (A, B, C) ⇒ Res =
+ (a: A, b: B, c: C) ⇒ cse.value(a :: b :: c :: HNil)
+
+ implicit def inst4[Fn <: Poly, A, B, C, D, Res](cse: Case[Fn, A :: B :: C :: D :: HNil] { type Result = Res }): (A, B, C, D) ⇒ Res =
+ (a: A, b: B, c: C, d: D) ⇒ cse.value(a :: b :: c :: d :: HNil)
+
+ implicit def inst5[Fn <: Poly, A, B, C, D, E, Res](cse: Case[Fn, A :: B :: C :: D :: E :: HNil] { type Result = Res }): (A, B, C, D, E) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E) ⇒ cse.value(a :: b :: c :: d :: e :: HNil)
+
+ implicit def inst6[Fn <: Poly, A, B, C, D, E, F, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: HNil] { type Result = Res }): (A, B, C, D, E, F) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F) ⇒ cse.value(a :: b :: c :: d :: e :: f :: HNil)
+
+ implicit def inst7[Fn <: Poly, A, B, C, D, E, F, G, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: HNil] { type Result = Res }): (A, B, C, D, E, F, G) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: HNil)
+
+ implicit def inst8[Fn <: Poly, A, B, C, D, E, F, G, H, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: HNil)
+
+ implicit def inst9[Fn <: Poly, A, B, C, D, E, F, G, H, I, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: HNil)
+
+ implicit def inst10[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: HNil)
+
+ implicit def inst11[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: HNil)
+
+ implicit def inst12[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, L, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K, L) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: HNil)
+
+ implicit def inst13[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, L, M, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K, L, M) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: HNil)
+
+ implicit def inst14[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, L, M, N, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K, L, M, N) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: HNil)
+
+ implicit def inst15[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: HNil)
+
+ implicit def inst16[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: HNil)
+
+ implicit def inst17[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: HNil)
+
+ implicit def inst18[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: HNil)
+
+ implicit def inst19[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: HNil)
+
+ implicit def inst20[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: HNil)
+
+ implicit def inst21[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T, u: U) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: HNil)
+
+ implicit def inst22[Fn <: Poly, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, Res](cse: Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil] { type Result = Res }): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) ⇒ Res =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T, u: U, v: V) ⇒ cse.value(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: v :: HNil)
+
+}
\ No newline at end of file
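
Review note (not part of the patch): CaseInst turns a resolved polymorphic-function Case back into an ordinary FunctionN, so a Poly case can be handed to code expecting a plain Scala function. A minimal sketch, assuming Poly1/at from poly.scala (outside this hunk) behave as in upstream shapeless; the helper object `conv` is purely illustrative and only exists to bring inst1..inst22 into implicit scope:

  import akka.shapeless._

  object inc extends Poly1 {                 // Poly1 and at are defined in poly.scala, outside this hunk
    implicit def caseInt = at[Int](_ + 1)
  }

  object conv extends CaseInst               // hypothetical helper; upstream wires CaseInst in elsewhere
  import conv._

  // inst1 lets the resolved Case be used where a plain Int ⇒ Int is expected:
  val f: Int ⇒ Int = inc.caseInt
  f(41)                                      // == 42
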
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/cases.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/cases.scala
new file mode 100644
index 0000000000..497bd0ea5a
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/cases.scala
@@ -0,0 +1,353 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+trait Cases {
+ import poly._
+
+ type Case1[Fn, A] = Case[Fn, A :: HNil]
+
+ object Case1 {
+ type Aux[Fn, A, Result0] = Case[Fn, A :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, Result0](fn: (A) ⇒ Result0): Aux[Fn, A, Result0] =
+ new Case[Fn, A :: HNil] {
+ type Result = Result0
+ val value = (l: A :: HNil) ⇒ l match {
+ case a :: HNil ⇒
+ fn(a)
+ }
+ }
+ }
+
+ type Case2[Fn, A, B] = Case[Fn, A :: B :: HNil]
+
+ object Case2 {
+ type Aux[Fn, A, B, Result0] = Case[Fn, A :: B :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, Result0](fn: (A, B) ⇒ Result0): Aux[Fn, A, B, Result0] =
+ new Case[Fn, A :: B :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: HNil) ⇒ l match {
+ case a :: b :: HNil ⇒
+ fn(a, b)
+ }
+ }
+ }
+
+ type Case3[Fn, A, B, C] = Case[Fn, A :: B :: C :: HNil]
+
+ object Case3 {
+ type Aux[Fn, A, B, C, Result0] = Case[Fn, A :: B :: C :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, Result0](fn: (A, B, C) ⇒ Result0): Aux[Fn, A, B, C, Result0] =
+ new Case[Fn, A :: B :: C :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: HNil) ⇒ l match {
+ case a :: b :: c :: HNil ⇒
+ fn(a, b, c)
+ }
+ }
+ }
+
+ type Case4[Fn, A, B, C, D] = Case[Fn, A :: B :: C :: D :: HNil]
+
+ object Case4 {
+ type Aux[Fn, A, B, C, D, Result0] = Case[Fn, A :: B :: C :: D :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, Result0](fn: (A, B, C, D) ⇒ Result0): Aux[Fn, A, B, C, D, Result0] =
+ new Case[Fn, A :: B :: C :: D :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: HNil ⇒
+ fn(a, b, c, d)
+ }
+ }
+ }
+
+ type Case5[Fn, A, B, C, D, E] = Case[Fn, A :: B :: C :: D :: E :: HNil]
+
+ object Case5 {
+ type Aux[Fn, A, B, C, D, E, Result0] = Case[Fn, A :: B :: C :: D :: E :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, Result0](fn: (A, B, C, D, E) ⇒ Result0): Aux[Fn, A, B, C, D, E, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: HNil ⇒
+ fn(a, b, c, d, e)
+ }
+ }
+ }
+
+ type Case6[Fn, A, B, C, D, E, F] = Case[Fn, A :: B :: C :: D :: E :: F :: HNil]
+
+ object Case6 {
+ type Aux[Fn, A, B, C, D, E, F, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, Result0](fn: (A, B, C, D, E, F) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: HNil ⇒
+ fn(a, b, c, d, e, f)
+ }
+ }
+ }
+
+ type Case7[Fn, A, B, C, D, E, F, G] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: HNil]
+
+ object Case7 {
+ type Aux[Fn, A, B, C, D, E, F, G, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, Result0](fn: (A, B, C, D, E, F, G) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: HNil ⇒
+ fn(a, b, c, d, e, f, g)
+ }
+ }
+ }
+
+ type Case8[Fn, A, B, C, D, E, F, G, H] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: HNil]
+
+ object Case8 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, Result0](fn: (A, B, C, D, E, F, G, H) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h)
+ }
+ }
+ }
+
+ type Case9[Fn, A, B, C, D, E, F, G, H, I] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil]
+
+ object Case9 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, Result0](fn: (A, B, C, D, E, F, G, H, I) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i)
+ }
+ }
+ }
+
+ type Case10[Fn, A, B, C, D, E, F, G, H, I, J] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil]
+
+ object Case10 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, Result0](fn: (A, B, C, D, E, F, G, H, I, J) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j)
+ }
+ }
+ }
+
+ type Case11[Fn, A, B, C, D, E, F, G, H, I, J, K] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil]
+
+ object Case11 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k)
+ }
+ }
+ }
+
+ type Case12[Fn, A, B, C, D, E, F, G, H, I, J, K, L] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil]
+
+ object Case12 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, L, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K, L) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k, l)
+ }
+ }
+ }
+
+ type Case13[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil]
+
+ object Case13 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k, l, m)
+ }
+ }
+ }
+
+ type Case14[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil]
+
+ object Case14 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n)
+ }
+ }
+ }
+
+ type Case15[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil]
+
+ object Case15 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
+ }
+ }
+ }
+
+ type Case16[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil]
+
+ object Case16 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p)
+ }
+ }
+ }
+
+ type Case17[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil]
+
+ object Case17 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q)
+ }
+ }
+ }
+
+ type Case18[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil]
+
+ object Case18 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r)
+ }
+ }
+ }
+
+ type Case19[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil]
+
+ object Case19 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s)
+ }
+ }
+ }
+
+ type Case20[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil]
+
+ object Case20 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t)
+ }
+ }
+ }
+
+ type Case21[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil]
+
+ object Case21 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u)
+ }
+ }
+ }
+
+ type Case22[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil]
+
+ object Case22 {
+ type Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, Result0] = Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil] { type Result = Result0 }
+
+ def apply[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, Result0](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) ⇒ Result0): Aux[Fn, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, Result0] =
+ new Case[Fn, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil] {
+ type Result = Result0
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil) ⇒ l match {
+ case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: v :: HNil ⇒
+ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v)
+ }
+ }
+ }
+
+}
\ No newline at end of file
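
Review note (not part of the patch): each CaseN constructor wraps a plain FunctionN as a Case keyed on the HList of its argument types, and `value` pattern-matches that HList back into the original arguments. A minimal sketch; the helper object `cs` is purely illustrative, since upstream shapeless reaches these constructors through the poly object, and `Plus` is just a hypothetical marker type standing in the Fn position:

  import akka.shapeless._

  object cs extends Cases                    // hypothetical helper exposing Case1..Case22
  import cs._

  trait Plus                                 // hypothetical marker type for the Fn parameter

  // Case2.apply adapts a plain (Int, Int) ⇒ Int into a Case over Int :: Int :: HNil:
  val plusCase = Case2[Plus, Int, Int, Int]((a, b) ⇒ a + b)
  plusCase.value(1 :: 2 :: HNil)             // == 3
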
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/fnfromproduct.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/fnfromproduct.scala
new file mode 100644
index 0000000000..6164c10732
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/fnfromproduct.scala
@@ -0,0 +1,165 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+package ops
+
+import function.FnFromProduct
+
+trait FnFromProductInstances {
+ type Aux[F, Out0] = FnFromProduct[F] { type Out = Out0 }
+
+ implicit def fnFromProduct0[Res]: Aux[(HNil) ⇒ Res, () ⇒ Res] =
+ new FnFromProduct[(HNil) ⇒ Res] {
+ type Out = () ⇒ Res
+ def apply(hf: (HNil) ⇒ Res): Out = () ⇒ hf(HNil)
+ }
+
+ implicit def fnFromProduct1[A, Res]: Aux[(A :: HNil) ⇒ Res, (A) ⇒ Res] =
+ new FnFromProduct[(A :: HNil) ⇒ Res] {
+ type Out = (A) ⇒ Res
+ def apply(hf: (A :: HNil) ⇒ Res): Out = (a: A) ⇒ hf(a :: HNil)
+ }
+
+ implicit def fnFromProduct2[A, B, Res]: Aux[(A :: B :: HNil) ⇒ Res, (A, B) ⇒ Res] =
+ new FnFromProduct[(A :: B :: HNil) ⇒ Res] {
+ type Out = (A, B) ⇒ Res
+ def apply(hf: (A :: B :: HNil) ⇒ Res): Out = (a: A, b: B) ⇒ hf(a :: b :: HNil)
+ }
+
+ implicit def fnFromProduct3[A, B, C, Res]: Aux[(A :: B :: C :: HNil) ⇒ Res, (A, B, C) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: HNil) ⇒ Res] {
+ type Out = (A, B, C) ⇒ Res
+ def apply(hf: (A :: B :: C :: HNil) ⇒ Res): Out = (a: A, b: B, c: C) ⇒ hf(a :: b :: c :: HNil)
+ }
+
+ implicit def fnFromProduct4[A, B, C, D, Res]: Aux[(A :: B :: C :: D :: HNil) ⇒ Res, (A, B, C, D) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D) ⇒ hf(a :: b :: c :: d :: HNil)
+ }
+
+ implicit def fnFromProduct5[A, B, C, D, E, Res]: Aux[(A :: B :: C :: D :: E :: HNil) ⇒ Res, (A, B, C, D, E) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E) ⇒ hf(a :: b :: c :: d :: e :: HNil)
+ }
+
+ implicit def fnFromProduct6[A, B, C, D, E, F, Res]: Aux[(A :: B :: C :: D :: E :: F :: HNil) ⇒ Res, (A, B, C, D, E, F) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F) ⇒ hf(a :: b :: c :: d :: e :: f :: HNil)
+ }
+
+ implicit def fnFromProduct7[A, B, C, D, E, F, G, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: HNil) ⇒ Res, (A, B, C, D, E, F, G) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: HNil)
+ }
+
+ implicit def fnFromProduct8[A, B, C, D, E, F, G, H, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: HNil)
+ }
+
+ implicit def fnFromProduct9[A, B, C, D, E, F, G, H, I, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: HNil)
+ }
+
+ implicit def fnFromProduct10[A, B, C, D, E, F, G, H, I, J, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: HNil)
+ }
+
+ implicit def fnFromProduct11[A, B, C, D, E, F, G, H, I, J, K, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: HNil)
+ }
+
+ implicit def fnFromProduct12[A, B, C, D, E, F, G, H, I, J, K, L, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K, L) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: HNil)
+ }
+
+ implicit def fnFromProduct13[A, B, C, D, E, F, G, H, I, J, K, L, M, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K, L, M) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: HNil)
+ }
+
+ implicit def fnFromProduct14[A, B, C, D, E, F, G, H, I, J, K, L, M, N, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K, L, M, N) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: HNil)
+ }
+
+ implicit def fnFromProduct15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: HNil)
+ }
+
+ implicit def fnFromProduct16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: HNil)
+ }
+
+ implicit def fnFromProduct17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: HNil)
+ }
+
+ implicit def fnFromProduct18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: HNil)
+ }
+
+ implicit def fnFromProduct19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: HNil)
+ }
+
+ implicit def fnFromProduct20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: HNil)
+ }
+
+ implicit def fnFromProduct21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T, u: U) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: HNil)
+ }
+
+ implicit def fnFromProduct22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, Res]: Aux[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil) ⇒ Res, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) ⇒ Res] =
+ new FnFromProduct[(A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil) ⇒ Res] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) ⇒ Res
+ def apply(hf: (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil) ⇒ Res): Out = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T, u: U, v: V) ⇒ hf(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: v :: HNil)
+ }
+
+}
\ No newline at end of file
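
Review note (not part of the patch): FnFromProductInstances goes from an HList-consuming function to an ordinary FunctionN of the same arity. A minimal sketch, assuming (as in upstream shapeless) FnFromProduct lives in akka.shapeless.ops.function and its companion mixes in this trait so the instances resolve implicitly; `fromHListFn` is a purely illustrative helper:

  import akka.shapeless._
  import akka.shapeless.ops.function._       // assumed location of FnFromProduct in this vendored copy

  // turn an HList-consuming function into an ordinary FunctionN
  def fromHListFn[F](hf: F)(implicit ffp: FnFromProduct[F]): ffp.Out = ffp(hf)

  val hlAdd: Int :: Int :: HNil ⇒ Int = { case a :: b :: HNil ⇒ a + b }
  val add = fromHListFn(hlAdd)               // resolved by fnFromProduct2 above
  add(1, 2)                                  // == 3
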
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/fntoproduct.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/fntoproduct.scala
new file mode 100644
index 0000000000..e4cd91054d
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/fntoproduct.scala
@@ -0,0 +1,164 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+package ops
+
+import function.FnToProduct
+
+trait FnToProductInstances {
+ type Aux[F, Out0] = FnToProduct[F] { type Out = Out0 }
+
+ implicit def fnToProduct0[Res]: Aux[(() ⇒ Res), (HNil) ⇒ Res] =
+ new FnToProduct[() ⇒ Res] {
+ type Out = (HNil) ⇒ Res
+ def apply(fn: () ⇒ Res): Out = (l: HNil) ⇒ fn()
+ }
+
+ implicit def fnToProduct1[A, Res]: Aux[((A) ⇒ Res), (A :: HNil) ⇒ Res] =
+ new FnToProduct[(A) ⇒ Res] {
+ type Out = (A :: HNil) ⇒ Res
+ def apply(fn: (A) ⇒ Res): Out = (l: A :: HNil) ⇒ l match { case a :: HNil ⇒ fn(a) }
+ }
+
+ implicit def fnToProduct2[A, B, Res]: Aux[((A, B) ⇒ Res), (A :: B :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B) ⇒ Res] {
+ type Out = (A :: B :: HNil) ⇒ Res
+ def apply(fn: (A, B) ⇒ Res): Out = (l: A :: B :: HNil) ⇒ l match { case a :: b :: HNil ⇒ fn(a, b) }
+ }
+
+ implicit def fnToProduct3[A, B, C, Res]: Aux[((A, B, C) ⇒ Res), (A :: B :: C :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C) ⇒ Res] {
+ type Out = (A :: B :: C :: HNil) ⇒ Res
+ def apply(fn: (A, B, C) ⇒ Res): Out = (l: A :: B :: C :: HNil) ⇒ l match { case a :: b :: c :: HNil ⇒ fn(a, b, c) }
+ }
+
+ implicit def fnToProduct4[A, B, C, D, Res]: Aux[((A, B, C, D) ⇒ Res), (A :: B :: C :: D :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D) ⇒ Res): Out = (l: A :: B :: C :: D :: HNil) ⇒ l match { case a :: b :: c :: d :: HNil ⇒ fn(a, b, c, d) }
+ }
+
+ implicit def fnToProduct5[A, B, C, D, E, Res]: Aux[((A, B, C, D, E) ⇒ Res), (A :: B :: C :: D :: E :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: HNil ⇒ fn(a, b, c, d, e) }
+ }
+
+ implicit def fnToProduct6[A, B, C, D, E, F, Res]: Aux[((A, B, C, D, E, F) ⇒ Res), (A :: B :: C :: D :: E :: F :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: HNil ⇒ fn(a, b, c, d, e, f) }
+ }
+
+ implicit def fnToProduct7[A, B, C, D, E, F, G, Res]: Aux[((A, B, C, D, E, F, G) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: HNil ⇒ fn(a, b, c, d, e, f, g) }
+ }
+
+ implicit def fnToProduct8[A, B, C, D, E, F, G, H, Res]: Aux[((A, B, C, D, E, F, G, H) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: HNil ⇒ fn(a, b, c, d, e, f, g, h) }
+ }
+
+ implicit def fnToProduct9[A, B, C, D, E, F, G, H, I, Res]: Aux[((A, B, C, D, E, F, G, H, I) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i) }
+ }
+
+ implicit def fnToProduct10[A, B, C, D, E, F, G, H, I, J, Res]: Aux[((A, B, C, D, E, F, G, H, I, J) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j) }
+ }
+
+ implicit def fnToProduct11[A, B, C, D, E, F, G, H, I, J, K, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k) }
+ }
+
+ implicit def fnToProduct12[A, B, C, D, E, F, G, H, I, J, K, L, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K, L) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K, L) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K, L) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l) }
+ }
+
+ implicit def fnToProduct13[A, B, C, D, E, F, G, H, I, J, K, L, M, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K, L, M) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K, L, M) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K, L, M) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m) }
+ }
+
+ implicit def fnToProduct14[A, B, C, D, E, F, G, H, I, J, K, L, M, N, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K, L, M, N) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K, L, M, N) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n) }
+ }
+
+ implicit def fnToProduct15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) }
+ }
+
+ implicit def fnToProduct16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) }
+ }
+
+ implicit def fnToProduct17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q) }
+ }
+
+ implicit def fnToProduct18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r) }
+ }
+
+ implicit def fnToProduct19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s) }
+ }
+
+ implicit def fnToProduct20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t) }
+ }
+
+ implicit def fnToProduct21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u) }
+ }
+
+ implicit def fnToProduct22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, Res]: Aux[((A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) ⇒ Res), (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil) ⇒ Res] =
+ new FnToProduct[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) ⇒ Res] {
+ type Out = (A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil) ⇒ Res
+ def apply(fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) ⇒ Res): Out = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: v :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v) }
+ }
+}
\ No newline at end of file
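
Review note (not part of the patch): FnToProductInstances is the inverse direction of the previous file — an ordinary FunctionN lifted to a function over the corresponding HList. Same assumptions as the previous note about package location and companion wiring; `toHListFn` is again a purely illustrative helper:

  import akka.shapeless._
  import akka.shapeless.ops.function._       // assumed location of FnToProduct in this vendored copy

  // lift an ordinary FunctionN into an HList-consuming function
  def toHListFn[F](f: F)(implicit ftp: FnToProduct[F]): ftp.Out = ftp(f)

  val hlRepeat = toHListFn((s: String, n: Int) ⇒ s * n)   // resolved by fnToProduct2 above
  hlRepeat("ab" :: 3 :: HNil)                              // == "ababab"
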
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/hmapbuilder.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/hmapbuilder.scala
new file mode 100644
index 0000000000..d41f2fc28e
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/hmapbuilder.scala
@@ -0,0 +1,65 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+class HMapBuilder[R[_, _]] {
+
+ def apply[K0, V0](e0: (K0, V0))(implicit ev0: R[K0, V0]) = new HMap[R](Map(e0))
+
+ def apply[K0, V0, K1, V1](e0: (K0, V0), e1: (K1, V1))(implicit ev0: R[K0, V0], ev1: R[K1, V1]) = new HMap[R](Map(e0, e1))
+
+ def apply[K0, V0, K1, V1, K2, V2](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2]) = new HMap[R](Map(e0, e1, e2))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3]) = new HMap[R](Map(e0, e1, e2, e3))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4]) = new HMap[R](Map(e0, e1, e2, e3, e4))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10, K11, V11](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10), e11: (K11, V11))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10], ev11: R[K11, V11]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10, K11, V11, K12, V12](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10), e11: (K11, V11), e12: (K12, V12))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10], ev11: R[K11, V11], ev12: R[K12, V12]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10, K11, V11, K12, V12, K13, V13](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10), e11: (K11, V11), e12: (K12, V12), e13: (K13, V13))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10], ev11: R[K11, V11], ev12: R[K12, V12], ev13: R[K13, V13]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12, e13))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10, K11, V11, K12, V12, K13, V13, K14, V14](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10), e11: (K11, V11), e12: (K12, V12), e13: (K13, V13), e14: (K14, V14))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10], ev11: R[K11, V11], ev12: R[K12, V12], ev13: R[K13, V13], ev14: R[K14, V14]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12, e13, e14))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10, K11, V11, K12, V12, K13, V13, K14, V14, K15, V15](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10), e11: (K11, V11), e12: (K12, V12), e13: (K13, V13), e14: (K14, V14), e15: (K15, V15))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10], ev11: R[K11, V11], ev12: R[K12, V12], ev13: R[K13, V13], ev14: R[K14, V14], ev15: R[K15, V15]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12, e13, e14, e15))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10, K11, V11, K12, V12, K13, V13, K14, V14, K15, V15, K16, V16](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10), e11: (K11, V11), e12: (K12, V12), e13: (K13, V13), e14: (K14, V14), e15: (K15, V15), e16: (K16, V16))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10], ev11: R[K11, V11], ev12: R[K12, V12], ev13: R[K13, V13], ev14: R[K14, V14], ev15: R[K15, V15], ev16: R[K16, V16]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12, e13, e14, e15, e16))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10, K11, V11, K12, V12, K13, V13, K14, V14, K15, V15, K16, V16, K17, V17](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10), e11: (K11, V11), e12: (K12, V12), e13: (K13, V13), e14: (K14, V14), e15: (K15, V15), e16: (K16, V16), e17: (K17, V17))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10], ev11: R[K11, V11], ev12: R[K12, V12], ev13: R[K13, V13], ev14: R[K14, V14], ev15: R[K15, V15], ev16: R[K16, V16], ev17: R[K17, V17]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12, e13, e14, e15, e16, e17))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10, K11, V11, K12, V12, K13, V13, K14, V14, K15, V15, K16, V16, K17, V17, K18, V18](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10), e11: (K11, V11), e12: (K12, V12), e13: (K13, V13), e14: (K14, V14), e15: (K15, V15), e16: (K16, V16), e17: (K17, V17), e18: (K18, V18))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10], ev11: R[K11, V11], ev12: R[K12, V12], ev13: R[K13, V13], ev14: R[K14, V14], ev15: R[K15, V15], ev16: R[K16, V16], ev17: R[K17, V17], ev18: R[K18, V18]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12, e13, e14, e15, e16, e17, e18))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10, K11, V11, K12, V12, K13, V13, K14, V14, K15, V15, K16, V16, K17, V17, K18, V18, K19, V19](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10), e11: (K11, V11), e12: (K12, V12), e13: (K13, V13), e14: (K14, V14), e15: (K15, V15), e16: (K16, V16), e17: (K17, V17), e18: (K18, V18), e19: (K19, V19))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10], ev11: R[K11, V11], ev12: R[K12, V12], ev13: R[K13, V13], ev14: R[K14, V14], ev15: R[K15, V15], ev16: R[K16, V16], ev17: R[K17, V17], ev18: R[K18, V18], ev19: R[K19, V19]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12, e13, e14, e15, e16, e17, e18, e19))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10, K11, V11, K12, V12, K13, V13, K14, V14, K15, V15, K16, V16, K17, V17, K18, V18, K19, V19, K20, V20](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10), e11: (K11, V11), e12: (K12, V12), e13: (K13, V13), e14: (K14, V14), e15: (K15, V15), e16: (K16, V16), e17: (K17, V17), e18: (K18, V18), e19: (K19, V19), e20: (K20, V20))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10], ev11: R[K11, V11], ev12: R[K12, V12], ev13: R[K13, V13], ev14: R[K14, V14], ev15: R[K15, V15], ev16: R[K16, V16], ev17: R[K17, V17], ev18: R[K18, V18], ev19: R[K19, V19], ev20: R[K20, V20]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12, e13, e14, e15, e16, e17, e18, e19, e20))
+
+ def apply[K0, V0, K1, V1, K2, V2, K3, V3, K4, V4, K5, V5, K6, V6, K7, V7, K8, V8, K9, V9, K10, V10, K11, V11, K12, V12, K13, V13, K14, V14, K15, V15, K16, V16, K17, V17, K18, V18, K19, V19, K20, V20, K21, V21](e0: (K0, V0), e1: (K1, V1), e2: (K2, V2), e3: (K3, V3), e4: (K4, V4), e5: (K5, V5), e6: (K6, V6), e7: (K7, V7), e8: (K8, V8), e9: (K9, V9), e10: (K10, V10), e11: (K11, V11), e12: (K12, V12), e13: (K13, V13), e14: (K14, V14), e15: (K15, V15), e16: (K16, V16), e17: (K17, V17), e18: (K18, V18), e19: (K19, V19), e20: (K20, V20), e21: (K21, V21))(implicit ev0: R[K0, V0], ev1: R[K1, V1], ev2: R[K2, V2], ev3: R[K3, V3], ev4: R[K4, V4], ev5: R[K5, V5], ev6: R[K6, V6], ev7: R[K7, V7], ev8: R[K8, V8], ev9: R[K9, V9], ev10: R[K10, V10], ev11: R[K11, V11], ev12: R[K12, V12], ev13: R[K13, V13], ev14: R[K14, V14], ev15: R[K15, V15], ev16: R[K16, V16], ev17: R[K17, V17], ev18: R[K18, V18], ev19: R[K19, V19], ev20: R[K20, V20], ev21: R[K21, V21]) = new HMap[R](Map(e0, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12, e13, e14, e15, e16, e17, e18, e19, e20, e21))
+}
\ No newline at end of file
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/nats.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/nats.scala
new file mode 100644
index 0000000000..eb5977fc18
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/nats.scala
@@ -0,0 +1,87 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
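+/**
+ * Aliases and singleton values for the type-level natural numbers `_1` through `_22`,
+ * each defined as the successor of the previous one (e.g. `_3 = Succ[Succ[Succ[_0]]]`).
+ */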
+trait Nats {
+
+ type _1 = Succ[_0]
+ val _1: _1 = new _1
+
+ type _2 = Succ[_1]
+ val _2: _2 = new _2
+
+ type _3 = Succ[_2]
+ val _3: _3 = new _3
+
+ type _4 = Succ[_3]
+ val _4: _4 = new _4
+
+ type _5 = Succ[_4]
+ val _5: _5 = new _5
+
+ type _6 = Succ[_5]
+ val _6: _6 = new _6
+
+ type _7 = Succ[_6]
+ val _7: _7 = new _7
+
+ type _8 = Succ[_7]
+ val _8: _8 = new _8
+
+ type _9 = Succ[_8]
+ val _9: _9 = new _9
+
+ type _10 = Succ[_9]
+ val _10: _10 = new _10
+
+ type _11 = Succ[_10]
+ val _11: _11 = new _11
+
+ type _12 = Succ[_11]
+ val _12: _12 = new _12
+
+ type _13 = Succ[_12]
+ val _13: _13 = new _13
+
+ type _14 = Succ[_13]
+ val _14: _14 = new _14
+
+ type _15 = Succ[_14]
+ val _15: _15 = new _15
+
+ type _16 = Succ[_15]
+ val _16: _16 = new _16
+
+ type _17 = Succ[_16]
+ val _17: _17 = new _17
+
+ type _18 = Succ[_17]
+ val _18: _18 = new _18
+
+ type _19 = Succ[_18]
+ val _19: _19 = new _19
+
+ type _20 = Succ[_19]
+ val _20: _20 = new _20
+
+ type _21 = Succ[_20]
+ val _21: _21 = new _21
+
+ type _22 = Succ[_21]
+ val _22: _22 = new _22
+}
\ No newline at end of file
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/polyapply.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/polyapply.scala
new file mode 100644
index 0000000000..b31639df9b
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/polyapply.scala
@@ -0,0 +1,88 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
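+/**
+ * `apply` overloads for polymorphic functions: each overload packs its 1 to 22 arguments into
+ * an `HList` and dispatches to the matching implicit `Case` for this function and those argument types.
+ */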
+trait PolyApply {
+ import poly._
+ def apply[A](a: A)(implicit cse: Case[this.type, A :: HNil]): cse.Result =
+ cse(a :: HNil)
+
+ def apply[A, B](a: A, b: B)(implicit cse: Case[this.type, A :: B :: HNil]): cse.Result =
+ cse(a :: b :: HNil)
+
+ def apply[A, B, C](a: A, b: B, c: C)(implicit cse: Case[this.type, A :: B :: C :: HNil]): cse.Result =
+ cse(a :: b :: c :: HNil)
+
+ def apply[A, B, C, D](a: A, b: B, c: C, d: D)(implicit cse: Case[this.type, A :: B :: C :: D :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: HNil)
+
+ def apply[A, B, C, D, E](a: A, b: B, c: C, d: D, e: E)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: HNil)
+
+ def apply[A, B, C, D, E, F](a: A, b: B, c: C, d: D, e: E, f: F)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: HNil)
+
+ def apply[A, B, C, D, E, F, G](a: A, b: B, c: C, d: D, e: E, f: F, g: G)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K, L](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K, L, M](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K, L, M, N](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T, u: U)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: HNil)
+
+ def apply[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T, u: U, v: V)(implicit cse: Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil]): cse.Result =
+ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: v :: HNil)
+
+}
\ No newline at end of file
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/polyinst.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/polyinst.scala
new file mode 100644
index 0000000000..b3d4a440d1
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/polyinst.scala
@@ -0,0 +1,88 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
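+/**
+ * Implicit conversions from a `Poly` to an ordinary Scala function of arity 1 to 22, available
+ * whenever a `ProductCase` for the corresponding argument `HList` is in scope.
+ */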
+trait PolyInst {
+
+ implicit def inst1[A](fn: Poly)(implicit cse: fn.ProductCase[A :: HNil]): (A) ⇒ cse.Result =
+ (a: A) ⇒ cse(a :: HNil)
+
+ implicit def inst2[A, B](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: HNil]): (A, B) ⇒ cse.Result =
+ (a: A, b: B) ⇒ cse(a :: b :: HNil)
+
+ implicit def inst3[A, B, C](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: HNil]): (A, B, C) ⇒ cse.Result =
+ (a: A, b: B, c: C) ⇒ cse(a :: b :: c :: HNil)
+
+ implicit def inst4[A, B, C, D](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: HNil]): (A, B, C, D) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D) ⇒ cse(a :: b :: c :: d :: HNil)
+
+ implicit def inst5[A, B, C, D, E](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: HNil]): (A, B, C, D, E) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E) ⇒ cse(a :: b :: c :: d :: e :: HNil)
+
+ implicit def inst6[A, B, C, D, E, F](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: HNil]): (A, B, C, D, E, F) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F) ⇒ cse(a :: b :: c :: d :: e :: f :: HNil)
+
+ implicit def inst7[A, B, C, D, E, F, G](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: HNil]): (A, B, C, D, E, F, G) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: HNil)
+
+ implicit def inst8[A, B, C, D, E, F, G, H](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: HNil]): (A, B, C, D, E, F, G, H) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: HNil)
+
+ implicit def inst9[A, B, C, D, E, F, G, H, I](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil]): (A, B, C, D, E, F, G, H, I) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: HNil)
+
+ implicit def inst10[A, B, C, D, E, F, G, H, I, J](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil]): (A, B, C, D, E, F, G, H, I, J) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: HNil)
+
+ implicit def inst11[A, B, C, D, E, F, G, H, I, J, K](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil]): (A, B, C, D, E, F, G, H, I, J, K) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: HNil)
+
+ implicit def inst12[A, B, C, D, E, F, G, H, I, J, K, L](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil]): (A, B, C, D, E, F, G, H, I, J, K, L) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: HNil)
+
+ implicit def inst13[A, B, C, D, E, F, G, H, I, J, K, L, M](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil]): (A, B, C, D, E, F, G, H, I, J, K, L, M) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: HNil)
+
+ implicit def inst14[A, B, C, D, E, F, G, H, I, J, K, L, M, N](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil]): (A, B, C, D, E, F, G, H, I, J, K, L, M, N) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: HNil)
+
+ implicit def inst15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil]): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: HNil)
+
+ implicit def inst16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil]): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: HNil)
+
+ implicit def inst17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil]): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: HNil)
+
+ implicit def inst18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil]): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: HNil)
+
+ implicit def inst19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil]): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: HNil)
+
+ implicit def inst20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil]): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: HNil)
+
+ implicit def inst21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil]): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T, u: U) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: HNil)
+
+ implicit def inst22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](fn: Poly)(implicit cse: fn.ProductCase[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil]): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) ⇒ cse.Result =
+ (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T, u: U, v: V) ⇒ cse(a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: v :: HNil)
+
+}
\ No newline at end of file
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/polyntraits.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/polyntraits.scala
new file mode 100644
index 0000000000..a49895d78c
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/polyntraits.scala
@@ -0,0 +1,392 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
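+/**
+ * `Poly1` through `Poly22`: base traits for polymorphic functions of fixed arity. Each trait
+ * exposes an N-ary `Case` alias and an `at[...]` builder that lifts an ordinary function of N
+ * arguments into a case operating on the corresponding `HList`.
+ *
+ * Illustrative use only (a hypothetical size-computing function, not part of this file):
+ * {{{
+ * object size extends Poly1 {
+ *   implicit val caseInt = at[Int](_ ⇒ 1)
+ *   implicit val caseString = at[String](_.length)
+ * }
+ * }}}
+ */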
+trait Poly1 extends Poly { outer ⇒
+ type Case[A] = poly.Case[this.type, A :: HNil]
+
+ object Case {
+ type Aux[A, Result0] = poly.Case[outer.type, A :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A] {
+ def apply[Res](fn: (A) ⇒ Res) = new Case[A] {
+ type Result = Res
+ val value = (l: A :: HNil) ⇒ l match { case a :: HNil ⇒ fn(a) }
+ }
+ }
+
+ def at[A] = new CaseBuilder[A]
+}
+
+trait Poly2 extends Poly { outer ⇒
+ type Case[A, B] = poly.Case[this.type, A :: B :: HNil]
+
+ object Case {
+ type Aux[A, B, Result0] = poly.Case[outer.type, A :: B :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B] {
+ def apply[Res](fn: (A, B) ⇒ Res) = new Case[A, B] {
+ type Result = Res
+ val value = (l: A :: B :: HNil) ⇒ l match { case a :: b :: HNil ⇒ fn(a, b) }
+ }
+ }
+
+ def at[A, B] = new CaseBuilder[A, B]
+}
+
+trait Poly3 extends Poly { outer ⇒
+ type Case[A, B, C] = poly.Case[this.type, A :: B :: C :: HNil]
+
+ object Case {
+ type Aux[A, B, C, Result0] = poly.Case[outer.type, A :: B :: C :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C] {
+ def apply[Res](fn: (A, B, C) ⇒ Res) = new Case[A, B, C] {
+ type Result = Res
+ val value = (l: A :: B :: C :: HNil) ⇒ l match { case a :: b :: c :: HNil ⇒ fn(a, b, c) }
+ }
+ }
+
+ def at[A, B, C] = new CaseBuilder[A, B, C]
+}
+
+trait Poly4 extends Poly { outer ⇒
+ type Case[A, B, C, D] = poly.Case[this.type, A :: B :: C :: D :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, Result0] = poly.Case[outer.type, A :: B :: C :: D :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D] {
+ def apply[Res](fn: (A, B, C, D) ⇒ Res) = new Case[A, B, C, D] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: HNil) ⇒ l match { case a :: b :: c :: d :: HNil ⇒ fn(a, b, c, d) }
+ }
+ }
+
+ def at[A, B, C, D] = new CaseBuilder[A, B, C, D]
+}
+
+trait Poly5 extends Poly { outer ⇒
+ type Case[A, B, C, D, E] = poly.Case[this.type, A :: B :: C :: D :: E :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E] {
+ def apply[Res](fn: (A, B, C, D, E) ⇒ Res) = new Case[A, B, C, D, E] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: HNil ⇒ fn(a, b, c, d, e) }
+ }
+ }
+
+ def at[A, B, C, D, E] = new CaseBuilder[A, B, C, D, E]
+}
+
+trait Poly6 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F] {
+ def apply[Res](fn: (A, B, C, D, E, F) ⇒ Res) = new Case[A, B, C, D, E, F] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: HNil ⇒ fn(a, b, c, d, e, f) }
+ }
+ }
+
+ def at[A, B, C, D, E, F] = new CaseBuilder[A, B, C, D, E, F]
+}
+
+trait Poly7 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G] {
+ def apply[Res](fn: (A, B, C, D, E, F, G) ⇒ Res) = new Case[A, B, C, D, E, F, G] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: HNil ⇒ fn(a, b, c, d, e, f, g) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G] = new CaseBuilder[A, B, C, D, E, F, G]
+}
+
+trait Poly8 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H) ⇒ Res) = new Case[A, B, C, D, E, F, G, H] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: HNil ⇒ fn(a, b, c, d, e, f, g, h) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H] = new CaseBuilder[A, B, C, D, E, F, G, H]
+}
+
+trait Poly9 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I] = new CaseBuilder[A, B, C, D, E, F, G, H, I]
+}
+
+trait Poly10 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J]
+}
+
+trait Poly11 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K]
+}
+
+trait Poly12 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K, L] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, L, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K, L) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K, L] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K, L] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L]
+}
+
+trait Poly13 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K, L, M] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, L, M, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K, L, M] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K, L, M] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M]
+}
+
+trait Poly14 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, L, M, N, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K, L, M, N] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N]
+}
+
+trait Poly15 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]
+}
+
+trait Poly16 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]
+}
+
+trait Poly17 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]
+}
+
+trait Poly18 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]
+}
+
+trait Poly19 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]
+}
+
+trait Poly20 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]
+}
+
+trait Poly21 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]
+}
+
+trait Poly22 extends Poly { outer ⇒
+ type Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] = poly.Case[this.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil]
+
+ object Case {
+ type Aux[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, Result0] = poly.Case[outer.type, A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil] { type Result = Result0 }
+ }
+
+ class CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] {
+ def apply[Res](fn: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) ⇒ Res) = new Case[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] {
+ type Result = Res
+ val value = (l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil) ⇒ l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: v :: HNil ⇒ fn(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v) }
+ }
+ }
+
+ def at[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] = new CaseBuilder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/sizedbuilder.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/sizedbuilder.scala
new file mode 100644
index 0000000000..412f7d6fce
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/sizedbuilder.scala
@@ -0,0 +1,91 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
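+/**
+ * Constructors for `Sized` collections: each `apply` takes 1 to 22 elements, builds a `CC[T]`
+ * via `CanBuildFrom`, and wraps it with the matching type-level length `_1` to `_22`.
+ */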
+class SizedBuilder[CC[_]] {
+ import scala.collection.generic.CanBuildFrom
+ import nat._
+ import Sized.wrap
+
+ def apply[T](a: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _1]((cbf() += (a)).result)
+
+ def apply[T](a: T, b: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _2]((cbf() += (a, b)).result)
+
+ def apply[T](a: T, b: T, c: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _3]((cbf() += (a, b, c)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _4]((cbf() += (a, b, c, d)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _5]((cbf() += (a, b, c, d, e)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _6]((cbf() += (a, b, c, d, e, f)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _7]((cbf() += (a, b, c, d, e, f, g)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _8]((cbf() += (a, b, c, d, e, f, g, h)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _9]((cbf() += (a, b, c, d, e, f, g, h, i)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _10]((cbf() += (a, b, c, d, e, f, g, h, i, j)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _11]((cbf() += (a, b, c, d, e, f, g, h, i, j, k)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T, l: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _12]((cbf() += (a, b, c, d, e, f, g, h, i, j, k, l)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T, l: T, m: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _13]((cbf() += (a, b, c, d, e, f, g, h, i, j, k, l, m)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T, l: T, m: T, n: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _14]((cbf() += (a, b, c, d, e, f, g, h, i, j, k, l, m, n)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T, l: T, m: T, n: T, o: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _15]((cbf() += (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T, l: T, m: T, n: T, o: T, p: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _16]((cbf() += (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T, l: T, m: T, n: T, o: T, p: T, q: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _17]((cbf() += (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T, l: T, m: T, n: T, o: T, p: T, q: T, r: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _18]((cbf() += (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T, l: T, m: T, n: T, o: T, p: T, q: T, r: T, s: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _19]((cbf() += (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T, l: T, m: T, n: T, o: T, p: T, q: T, r: T, s: T, t: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _20]((cbf() += (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T, l: T, m: T, n: T, o: T, p: T, q: T, r: T, s: T, t: T, u: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _21]((cbf() += (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u)).result)
+
+ def apply[T](a: T, b: T, c: T, d: T, e: T, f: T, g: T, h: T, i: T, j: T, k: T, l: T, m: T, n: T, o: T, p: T, q: T, r: T, s: T, t: T, u: T, v: T)(implicit cbf: CanBuildFrom[Nothing, T, CC[T]]) =
+ wrap[CC[T], _22]((cbf() += (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v)).result)
+
+}
\ No newline at end of file
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/tupler.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/tupler.scala
new file mode 100644
index 0000000000..f3d68bce17
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/tupler.scala
@@ -0,0 +1,158 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+package ops
+
+import hlist.Tupler
+
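+/**
+ * `Tupler` instances converting an `HList` of 1 to 22 elements into the corresponding
+ * Scala tuple, e.g. `A :: B :: HNil` to `(A, B)`.
+ */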
+trait TuplerInstances {
+ type Aux[L <: HList, Out0] = Tupler[L] { type Out = Out0 }
+
+ implicit def hlistTupler1[A]: Aux[A :: HNil, Tuple1[A]] =
+ new Tupler[A :: HNil] {
+ type Out = Tuple1[A]
+ def apply(l: A :: HNil): Out = l match { case a :: HNil ⇒ Tuple1(a) }
+ }
+
+ implicit def hlistTupler2[A, B]: Aux[A :: B :: HNil, (A, B)] =
+ new Tupler[A :: B :: HNil] {
+ type Out = (A, B)
+ def apply(l: A :: B :: HNil): Out = l match { case a :: b :: HNil ⇒ (a, b) }
+ }
+
+ implicit def hlistTupler3[A, B, C]: Aux[A :: B :: C :: HNil, (A, B, C)] =
+ new Tupler[A :: B :: C :: HNil] {
+ type Out = (A, B, C)
+ def apply(l: A :: B :: C :: HNil): Out = l match { case a :: b :: c :: HNil ⇒ (a, b, c) }
+ }
+
+ implicit def hlistTupler4[A, B, C, D]: Aux[A :: B :: C :: D :: HNil, (A, B, C, D)] =
+ new Tupler[A :: B :: C :: D :: HNil] {
+ type Out = (A, B, C, D)
+ def apply(l: A :: B :: C :: D :: HNil): Out = l match { case a :: b :: c :: d :: HNil ⇒ (a, b, c, d) }
+ }
+
+ implicit def hlistTupler5[A, B, C, D, E]: Aux[A :: B :: C :: D :: E :: HNil, (A, B, C, D, E)] =
+ new Tupler[A :: B :: C :: D :: E :: HNil] {
+ type Out = (A, B, C, D, E)
+ def apply(l: A :: B :: C :: D :: E :: HNil): Out = l match { case a :: b :: c :: d :: e :: HNil ⇒ (a, b, c, d, e) }
+ }
+
+ implicit def hlistTupler6[A, B, C, D, E, F]: Aux[A :: B :: C :: D :: E :: F :: HNil, (A, B, C, D, E, F)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: HNil] {
+ type Out = (A, B, C, D, E, F)
+ def apply(l: A :: B :: C :: D :: E :: F :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: HNil ⇒ (a, b, c, d, e, f) }
+ }
+
+ implicit def hlistTupler7[A, B, C, D, E, F, G]: Aux[A :: B :: C :: D :: E :: F :: G :: HNil, (A, B, C, D, E, F, G)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: HNil] {
+ type Out = (A, B, C, D, E, F, G)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: HNil ⇒ (a, b, c, d, e, f, g) }
+ }
+
+ implicit def hlistTupler8[A, B, C, D, E, F, G, H]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: HNil, (A, B, C, D, E, F, G, H)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: HNil ⇒ (a, b, c, d, e, f, g, h) }
+ }
+
+ implicit def hlistTupler9[A, B, C, D, E, F, G, H, I]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil, (A, B, C, D, E, F, G, H, I)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: HNil ⇒ (a, b, c, d, e, f, g, h, i) }
+ }
+
+ implicit def hlistTupler10[A, B, C, D, E, F, G, H, I, J]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil, (A, B, C, D, E, F, G, H, I, J)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j) }
+ }
+
+ implicit def hlistTupler11[A, B, C, D, E, F, G, H, I, J, K]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil, (A, B, C, D, E, F, G, H, I, J, K)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k) }
+ }
+
+ implicit def hlistTupler12[A, B, C, D, E, F, G, H, I, J, K, L]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil, (A, B, C, D, E, F, G, H, I, J, K, L)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k, l) }
+ }
+
+ implicit def hlistTupler13[A, B, C, D, E, F, G, H, I, J, K, L, M]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil, (A, B, C, D, E, F, G, H, I, J, K, L, M)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k, l, m) }
+ }
+
+ implicit def hlistTupler14[A, B, C, D, E, F, G, H, I, J, K, L, M, N]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil, (A, B, C, D, E, F, G, H, I, J, K, L, M, N)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k, l, m, n) }
+ }
+
+ implicit def hlistTupler15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) }
+ }
+
+ implicit def hlistTupler16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) }
+ }
+
+ implicit def hlistTupler17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q) }
+ }
+
+ implicit def hlistTupler18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r) }
+ }
+
+ implicit def hlistTupler19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s) }
+ }
+
+ implicit def hlistTupler20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t) }
+ }
+
+ implicit def hlistTupler21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u) }
+ }
+
+ implicit def hlistTupler22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]: Aux[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] =
+ new Tupler[A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil] {
+ type Out = (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)
+ def apply(l: A :: B :: C :: D :: E :: F :: G :: H :: I :: J :: K :: L :: M :: N :: O :: P :: Q :: R :: S :: T :: U :: V :: HNil): Out = l match { case a :: b :: c :: d :: e :: f :: g :: h :: i :: j :: k :: l :: m :: n :: o :: p :: q :: r :: s :: t :: u :: v :: HNil ⇒ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v) }
+ }
+}
\ No newline at end of file
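The `TuplerInstances` above supply one implicit per arity (1 to 22), each converting an `HList` of that length into the corresponding tuple. A minimal usage sketch, assuming the `tupled` extension method that `akka.shapeless.syntax.HListOps` provides on top of these instances:

    import akka.shapeless._

    val hl = 1 :: "two" :: 3.0 :: HNil          // Int :: String :: Double :: HNil
    val t: (Int, String, Double) = hl.tupled    // resolves hlistTupler3[Int, String, Double]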
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generated/tupletypeables.scala b/akka-parsing/src/main/scala/akka/shapeless/generated/tupletypeables.scala
new file mode 100644
index 0000000000..b4d2072ae2
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generated/tupletypeables.scala
@@ -0,0 +1,265 @@
+
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+trait TupleTypeableInstances {
+ import syntax.typeable._
+
+ implicit def tuple1Typeable[A](implicit castA: Typeable[A]) = new Typeable[Tuple1[A]] {
+ def cast(t: Any): Option[Tuple1[A]] = {
+ if (t == null) Some(t.asInstanceOf[Tuple1[A]])
+ else if (t.isInstanceOf[Tuple1[_]]) {
+ val p = t.asInstanceOf[Tuple1[_]]
+ for (_ ← p._1.cast[A])
+ yield t.asInstanceOf[Tuple1[A]]
+ } else None
+ }
+ }
+
+ implicit def tuple2Typeable[A, B](implicit castA: Typeable[A], castB: Typeable[B]) = new Typeable[(A, B)] {
+ def cast(t: Any): Option[(A, B)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B)])
+ else if (t.isInstanceOf[(_, _)]) {
+ val p = t.asInstanceOf[(_, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B])
+ yield t.asInstanceOf[(A, B)]
+ } else None
+ }
+ }
+
+ implicit def tuple3Typeable[A, B, C](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C]) = new Typeable[(A, B, C)] {
+ def cast(t: Any): Option[(A, B, C)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C)])
+ else if (t.isInstanceOf[(_, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C])
+ yield t.asInstanceOf[(A, B, C)]
+ } else None
+ }
+ }
+
+ implicit def tuple4Typeable[A, B, C, D](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D]) = new Typeable[(A, B, C, D)] {
+ def cast(t: Any): Option[(A, B, C, D)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D)])
+ else if (t.isInstanceOf[(_, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D])
+ yield t.asInstanceOf[(A, B, C, D)]
+ } else None
+ }
+ }
+
+ implicit def tuple5Typeable[A, B, C, D, E](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E]) = new Typeable[(A, B, C, D, E)] {
+ def cast(t: Any): Option[(A, B, C, D, E)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E)])
+ else if (t.isInstanceOf[(_, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E])
+ yield t.asInstanceOf[(A, B, C, D, E)]
+ } else None
+ }
+ }
+
+ implicit def tuple6Typeable[A, B, C, D, E, F](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F]) = new Typeable[(A, B, C, D, E, F)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F])
+ yield t.asInstanceOf[(A, B, C, D, E, F)]
+ } else None
+ }
+ }
+
+ implicit def tuple7Typeable[A, B, C, D, E, F, G](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G]) = new Typeable[(A, B, C, D, E, F, G)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G)]
+ } else None
+ }
+ }
+
+ implicit def tuple8Typeable[A, B, C, D, E, F, G, H](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H]) = new Typeable[(A, B, C, D, E, F, G, H)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H)]
+ } else None
+ }
+ }
+
+ implicit def tuple9Typeable[A, B, C, D, E, F, G, H, I](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I]) = new Typeable[(A, B, C, D, E, F, G, H, I)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I)]
+ } else None
+ }
+ }
+
+ implicit def tuple10Typeable[A, B, C, D, E, F, G, H, I, J](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J]) = new Typeable[(A, B, C, D, E, F, G, H, I, J)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J)]
+ } else None
+ }
+ }
+
+ implicit def tuple11Typeable[A, B, C, D, E, F, G, H, I, J, K](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K)]
+ } else None
+ }
+ }
+
+ implicit def tuple12Typeable[A, B, C, D, E, F, G, H, I, J, K, L](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K], castL: Typeable[L]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K, L)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K, L)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K]; _ ← p._12.cast[L])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L)]
+ } else None
+ }
+ }
+
+ implicit def tuple13Typeable[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K], castL: Typeable[L], castM: Typeable[M]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K, L, M)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K]; _ ← p._12.cast[L]; _ ← p._13.cast[M])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M)]
+ } else None
+ }
+ }
+
+ implicit def tuple14Typeable[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K], castL: Typeable[L], castM: Typeable[M], castN: Typeable[N]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K]; _ ← p._12.cast[L]; _ ← p._13.cast[M]; _ ← p._14.cast[N])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)]
+ } else None
+ }
+ }
+
+ implicit def tuple15Typeable[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K], castL: Typeable[L], castM: Typeable[M], castN: Typeable[N], castO: Typeable[O]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K]; _ ← p._12.cast[L]; _ ← p._13.cast[M]; _ ← p._14.cast[N]; _ ← p._15.cast[O])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)]
+ } else None
+ }
+ }
+
+ implicit def tuple16Typeable[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K], castL: Typeable[L], castM: Typeable[M], castN: Typeable[N], castO: Typeable[O], castP: Typeable[P]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K]; _ ← p._12.cast[L]; _ ← p._13.cast[M]; _ ← p._14.cast[N]; _ ← p._15.cast[O]; _ ← p._16.cast[P])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)]
+ } else None
+ }
+ }
+
+ implicit def tuple17Typeable[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K], castL: Typeable[L], castM: Typeable[M], castN: Typeable[N], castO: Typeable[O], castP: Typeable[P], castQ: Typeable[Q]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K]; _ ← p._12.cast[L]; _ ← p._13.cast[M]; _ ← p._14.cast[N]; _ ← p._15.cast[O]; _ ← p._16.cast[P]; _ ← p._17.cast[Q])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)]
+ } else None
+ }
+ }
+
+ implicit def tuple18Typeable[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K], castL: Typeable[L], castM: Typeable[M], castN: Typeable[N], castO: Typeable[O], castP: Typeable[P], castQ: Typeable[Q], castR: Typeable[R]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K]; _ ← p._12.cast[L]; _ ← p._13.cast[M]; _ ← p._14.cast[N]; _ ← p._15.cast[O]; _ ← p._16.cast[P]; _ ← p._17.cast[Q]; _ ← p._18.cast[R])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)]
+ } else None
+ }
+ }
+
+ implicit def tuple19Typeable[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K], castL: Typeable[L], castM: Typeable[M], castN: Typeable[N], castO: Typeable[O], castP: Typeable[P], castQ: Typeable[Q], castR: Typeable[R], castS: Typeable[S]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K]; _ ← p._12.cast[L]; _ ← p._13.cast[M]; _ ← p._14.cast[N]; _ ← p._15.cast[O]; _ ← p._16.cast[P]; _ ← p._17.cast[Q]; _ ← p._18.cast[R]; _ ← p._19.cast[S])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)]
+ } else None
+ }
+ }
+
+ implicit def tuple20Typeable[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K], castL: Typeable[L], castM: Typeable[M], castN: Typeable[N], castO: Typeable[O], castP: Typeable[P], castQ: Typeable[Q], castR: Typeable[R], castS: Typeable[S], castT: Typeable[T]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K]; _ ← p._12.cast[L]; _ ← p._13.cast[M]; _ ← p._14.cast[N]; _ ← p._15.cast[O]; _ ← p._16.cast[P]; _ ← p._17.cast[Q]; _ ← p._18.cast[R]; _ ← p._19.cast[S]; _ ← p._20.cast[T])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)]
+ } else None
+ }
+ }
+
+ implicit def tuple21Typeable[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K], castL: Typeable[L], castM: Typeable[M], castN: Typeable[N], castO: Typeable[O], castP: Typeable[P], castQ: Typeable[Q], castR: Typeable[R], castS: Typeable[S], castT: Typeable[T], castU: Typeable[U]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K]; _ ← p._12.cast[L]; _ ← p._13.cast[M]; _ ← p._14.cast[N]; _ ← p._15.cast[O]; _ ← p._16.cast[P]; _ ← p._17.cast[Q]; _ ← p._18.cast[R]; _ ← p._19.cast[S]; _ ← p._20.cast[T]; _ ← p._21.cast[U])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)]
+ } else None
+ }
+ }
+
+ implicit def tuple22Typeable[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](implicit castA: Typeable[A], castB: Typeable[B], castC: Typeable[C], castD: Typeable[D], castE: Typeable[E], castF: Typeable[F], castG: Typeable[G], castH: Typeable[H], castI: Typeable[I], castJ: Typeable[J], castK: Typeable[K], castL: Typeable[L], castM: Typeable[M], castN: Typeable[N], castO: Typeable[O], castP: Typeable[P], castQ: Typeable[Q], castR: Typeable[R], castS: Typeable[S], castT: Typeable[T], castU: Typeable[U], castV: Typeable[V]) = new Typeable[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] {
+ def cast(t: Any): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] = {
+ if (t == null) Some(t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)])
+ else if (t.isInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]) {
+ val p = t.asInstanceOf[(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _)]
+ for (_ ← p._1.cast[A]; _ ← p._2.cast[B]; _ ← p._3.cast[C]; _ ← p._4.cast[D]; _ ← p._5.cast[E]; _ ← p._6.cast[F]; _ ← p._7.cast[G]; _ ← p._8.cast[H]; _ ← p._9.cast[I]; _ ← p._10.cast[J]; _ ← p._11.cast[K]; _ ← p._12.cast[L]; _ ← p._13.cast[M]; _ ← p._14.cast[N]; _ ← p._15.cast[O]; _ ← p._16.cast[P]; _ ← p._17.cast[Q]; _ ← p._18.cast[R]; _ ← p._19.cast[S]; _ ← p._20.cast[T]; _ ← p._21.cast[U]; _ ← p._22.cast[V])
+ yield t.asInstanceOf[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)]
+ } else None
+ }
+ }
+
+}
\ No newline at end of file
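Each instance above recovers a typed tuple from an untyped value only if every element casts successfully; a `null` input is accepted as any tuple type, matching the convention of the other `Typeable` instances. A small usage sketch, assuming the primitive `Typeable[Int]`/`Typeable[String]` instances defined elsewhere in this package:

    import akka.shapeless._
    import syntax.typeable._

    val any: Any = (1, "foo")
    val ok: Option[(Int, String)] = any.cast[(Int, String)]   // Some((1, "foo"))
    val ko: Option[(Int, Int)]    = any.cast[(Int, Int)]      // None: the second element fails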
diff --git a/akka-parsing/src/main/scala/akka/shapeless/generic.scala b/akka-parsing/src/main/scala/akka/shapeless/generic.scala
new file mode 100644
index 0000000000..14a01b4b58
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/generic.scala
@@ -0,0 +1,547 @@
+/*
+ * Copyright (c) 2012-14 Lars Hupel, Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import scala.language.experimental.macros
+
+import scala.collection.breakOut
+import scala.collection.immutable.ListMap
+import scala.reflect.macros.Context
+
+trait Generic[T] {
+ type Repr
+ def to(t: T): Repr
+ def from(r: Repr): T
+}
+
+trait LowPriorityGeneric {
+ implicit def apply[T]: Generic[T] = macro GenericMacros.materialize[T]
+}
+
+object Generic extends LowPriorityGeneric {
+ type Aux[T, Repr0] = Generic[T] { type Repr = Repr0 }
+
+ // Refinement for products: here we can provide the calling context with
+ // a proof that the resulting Repr <: HList
+ implicit def product[T <: Product]: Generic[T] = macro GenericMacros.materializeForProduct[T]
+}
+
+trait LabelledGeneric[T] {
+ type Repr
+ def to(t: T): Repr
+ def from(r: Repr): T
+}
+
+trait LowPriorityLabelledGeneric {
+ implicit def apply[T]: LabelledGeneric[T] = macro GenericMacros.materializeLabelled[T]
+}
+
+object LabelledGeneric extends LowPriorityLabelledGeneric {
+ // Refinement for products: here we can provide the calling context with
+ // a proof that the resulting Repr is a record
+ type Aux[T, Out0] = LabelledGeneric[T] { type Repr = Out0 }
+
+ implicit def product[T <: Product]: LabelledGeneric[T] = macro GenericMacros.materializeLabelledForProduct[T]
+}
+
+object GenericMacros {
+ import akka.shapeless.record.FieldType
+
+ def materialize[T](c: Context)(implicit tT: c.WeakTypeTag[T]): c.Expr[Generic[T]] =
+ materializeAux[Generic[T]](c)(false, false, tT.tpe)
+
+ def materializeForProduct[T <: Product](c: Context)(implicit tT: c.WeakTypeTag[T]): c.Expr[Generic[T] { type Repr <: HList }] =
+ materializeAux[Generic[T] { type Repr <: HList }](c)(true, false, tT.tpe)
+
+ def materializeLabelled[T](c: Context)(implicit tT: c.WeakTypeTag[T]): c.Expr[LabelledGeneric[T]] =
+ materializeAux[LabelledGeneric[T]](c)(false, true, tT.tpe)
+
+ def materializeLabelledForProduct[T <: Product](c: Context)(implicit tT: c.WeakTypeTag[T]): c.Expr[LabelledGeneric[T] { type Repr <: HList }] =
+ materializeAux[LabelledGeneric[T] { type Repr <: HList }](c)(true, true, tT.tpe)
+
+ def materializeAux[G](c0: Context)(product0: Boolean, labelled0: Boolean, tpe0: c0.Type): c0.Expr[G] = {
+ import c0.{ abort, enclosingPosition, typeOf, Expr }
+
+ if (product0 && tpe0 <:< typeOf[Coproduct])
+ abort(enclosingPosition, s"Cannot materialize Coproduct $tpe0 as a Product")
+
+ val helper = new Helper[c0.type] {
+ val c: c0.type = c0
+ val fromTpe = tpe0
+ val toProduct = product0
+ val toLabelled = labelled0
+ val labelledRepr = labelled0
+ }
+
+ Expr[G] {
+ if (tpe0 <:< typeOf[HList] || tpe0 <:< typeOf[Coproduct])
+ helper.materializeIdentityGeneric
+ else
+ helper.materializeGeneric
+ }
+ }
+
+ def deriveProductInstance[C[_], T](c: Context)(ev: c.Expr[_])(implicit tTag: c.WeakTypeTag[T], cTag: c.WeakTypeTag[C[Any]]): c.Expr[C[T]] =
+ deriveInstanceAux(c)(ev.tree, true, false, tTag, cTag)
+
+ def deriveLabelledProductInstance[C[_], T](c: Context)(ev: c.Expr[_])(implicit tTag: c.WeakTypeTag[T], cTag: c.WeakTypeTag[C[Any]]): c.Expr[C[T]] =
+ deriveInstanceAux(c)(ev.tree, true, true, tTag, cTag)
+
+ def deriveInstance[C[_], T](c: Context)(ev: c.Expr[_])(implicit tTag: c.WeakTypeTag[T], cTag: c.WeakTypeTag[C[Any]]): c.Expr[C[T]] =
+ deriveInstanceAux(c)(ev.tree, false, false, tTag, cTag)
+
+ def deriveLabelledInstance[C[_], T](c: Context)(ev: c.Expr[_])(implicit tTag: c.WeakTypeTag[T], cTag: c.WeakTypeTag[C[Any]]): c.Expr[C[T]] =
+ deriveInstanceAux(c)(ev.tree, false, true, tTag, cTag)
+
+ def deriveInstanceAux[C[_], T](c0: Context)(deriver: c0.Tree, product0: Boolean, labelled0: Boolean, tTag: c0.WeakTypeTag[T], cTag: c0.WeakTypeTag[C[Any]]): c0.Expr[C[T]] = {
+ import c0.Expr
+ val helper = new Helper[c0.type] {
+ val c: c0.type = c0
+ val fromTpe = tTag.tpe
+ val toProduct = product0
+ val toLabelled = labelled0
+ val labelledRepr = false
+ }
+
+ Expr[C[T]] {
+ helper.deriveInstance(deriver, cTag.tpe.typeConstructor)
+ }
+ }
+
+ trait Helper[+C <: Context] {
+ val c: C
+ val fromTpe: c.Type
+ val toProduct: Boolean
+ val toLabelled: Boolean
+ val labelledRepr: Boolean
+
+ import c.universe._
+ import Flag._
+
+ def unitValueTree = reify { () }.tree
+ def absurdValueTree = reify { ??? }.tree
+ def hconsValueTree = reify { :: }.tree
+ def hnilValueTree = reify { HNil }.tree
+ def inlValueTree = reify { Inl }.tree
+ def inrValueTree = reify { Inr }.tree
+
+ def anyRefTpe = typeOf[AnyRef]
+ def unitTpe = typeOf[Unit]
+ def hconsTpe = typeOf[::[_, _]].typeConstructor
+ def hnilTpe = typeOf[HNil]
+ def cconsTpe = typeOf[:+:[_, _]].typeConstructor
+ def cnilTpe = typeOf[CNil]
+ def atatTpe = typeOf[tag.@@[_, _]].typeConstructor
+ def symTpe = typeOf[scala.Symbol]
+ def fieldTypeTpe = typeOf[FieldType[_, _]].typeConstructor
+ def genericTpe = typeOf[Generic[_]].typeConstructor
+ def labelledGenericTpe = typeOf[LabelledGeneric[_]].typeConstructor
+ def typeClassTpe = typeOf[TypeClass[Any]].typeConstructor
+ def labelledTypeClassTpe = typeOf[LabelledTypeClass[Any]].typeConstructor
+ def productTypeClassTpe = typeOf[ProductTypeClass[Any]].typeConstructor
+ def labelledProductTypeClassTpe = typeOf[LabelledProductTypeClass[Any]].typeConstructor
+ def deriveCtorsTpe = typeOf[DeriveConstructors]
+
+ def toName = newTermName("to")
+ def fromName = newTermName("from")
+ def reprName = newTypeName("Repr")
+
+ def nameAsValue(name: Name): Constant = Constant(name.decoded.trim)
+
+ def nameAsLiteral(name: Name): Tree = Literal(nameAsValue(name))
+
+ def nameOf(tpe: Type) = tpe.typeSymbol.name
+
+ def fieldsOf(tpe: Type): List[(Name, Type)] =
+ tpe.declarations.toList collect {
+ case sym: TermSymbol if sym.isVal && sym.isCaseAccessor ⇒ (sym.name, sym.typeSignatureIn(tpe))
+ }
+
+ def reprOf(tpe: Type): Type = {
+ val fields = fieldsOf(tpe)
+ if (labelledRepr)
+ mkRecordTpe(fields)
+ else
+ mkHListTpe(fields.map(_._2))
+ }
+
+ def mkCompoundTpe(nil: Type, cons: Type, items: List[Type]): Type =
+ items.foldRight(nil) { case (tpe, acc) ⇒ appliedType(cons, List(tpe, acc)) }
+
+ def mkFieldTpe(name: Name, valueTpe: Type): Type = {
+ val keyTpe = appliedType(atatTpe, List(symTpe, ConstantType(nameAsValue(name))))
+ appliedType(fieldTypeTpe, List(keyTpe, valueTpe))
+ }
+
+ def mkHListTpe(items: List[Type]): Type =
+ mkCompoundTpe(hnilTpe, hconsTpe, items)
+
+ def mkRecordTpe(fields: List[(Name, Type)]): Type =
+ mkCompoundTpe(hnilTpe, hconsTpe, fields.map((mkFieldTpe _).tupled))
+
+ def mkCoproductTpe(items: List[Type]): Type =
+ mkCompoundTpe(cnilTpe, cconsTpe, items)
+
+ def mkUnionTpe(fields: List[(Name, Type)]): Type =
+ mkCompoundTpe(cnilTpe, cconsTpe, fields.map((mkFieldTpe _).tupled))
+
+ lazy val fromSym = {
+ val sym = fromTpe.typeSymbol
+ if (!sym.isClass)
+ abort(s"$sym is not a class or trait")
+
+ val fromSym0 = sym.asClass
+ fromSym0.typeSignature // Workaround: force completion of the symbol's type signature before it is inspected below
+
+ fromSym0
+ }
+
+ lazy val fromProduct = fromTpe =:= unitTpe || fromSym.isCaseClass
+
+ lazy val fromCtors = {
+ def collectCtors(classSym: ClassSymbol): List[ClassSymbol] = {
+ classSym.knownDirectSubclasses.toList flatMap { child0 ⇒
+ val child = child0.asClass
+ child.typeSignature // Workaround: force completion of the child's type signature before isCaseClass/isSealed are checked
+ if (child.isCaseClass)
+ List(child)
+ else if (child.isSealed)
+ collectCtors(child)
+ else
+ abort(s"$child is not a case class or a sealed trait")
+ }
+ }
+
+ if (fromProduct)
+ List(fromTpe)
+ else if (fromSym.isSealed) { // multiple ctors
+ if (toProduct) abort(s"Cannot derive a ProductTypeClass for non-Product trait $fromTpe")
+ val ctors = collectCtors(fromSym).sortBy(_.fullName)
+ if (ctors.isEmpty) abort(s"Sealed trait $fromTpe has no case class subtypes")
+
+ // We're using an extremely optimistic strategy here, basically ignoring
+ // the existence of any existential types.
+ val baseTpe: TypeRef = fromTpe match {
+ case tr: TypeRef ⇒ tr
+ case _ ⇒ abort(s"bad type $fromTpe")
+ }
+
+ ctors map { sym ⇒
+ val subTpe = sym.asType.toType
+ val normalized = sym.typeParams match {
+ case Nil ⇒ subTpe
+ case tpes ⇒ appliedType(subTpe, baseTpe.args)
+ }
+
+ normalized
+ }
+ } else
+ abort(s"$fromSym is not a case class, a sealed trait or Unit")
+ }
+
+ def abort(msg: String) =
+ c.abort(c.enclosingPosition, msg)
+
+ def mkObjectSelection(defns: List[Tree], member: TermName): Tree = {
+ val name = newTermName(c.fresh())
+
+ val module =
+ ModuleDef(
+ Modifiers(),
+ name,
+ Template(
+ List(TypeTree(anyRefTpe)),
+ emptyValDef,
+ mkConstructor :: defns))
+
+ Block(
+ List(module),
+ Select(Ident(name), member))
+ }
+
+ def mkClass(parent: Type, defns: List[Tree]): Tree = {
+ val name = newTypeName(c.fresh())
+
+ val clazz =
+ ClassDef(
+ Modifiers(FINAL),
+ name,
+ List(),
+ Template(
+ List(TypeTree(parent)),
+ emptyValDef,
+ mkConstructor :: defns))
+
+ Block(
+ List(clazz),
+ Apply(Select(New(Ident(name)), nme.CONSTRUCTOR), List()))
+ }
+
+ def mkConstructor =
+ DefDef(
+ Modifiers(),
+ nme.CONSTRUCTOR,
+ List(),
+ List(List()),
+ TypeTree(),
+ Block(
+ List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())),
+ Literal(Constant(()))))
+
+ def mkElem(elem: Tree, name: Name, tpe: Type): Tree =
+ if (labelledRepr)
+ TypeApply(Select(elem, newTermName("asInstanceOf")), List(TypeTree(mkFieldTpe(name, tpe))))
+ else
+ elem
+
+ type ProductCaseFn = Type ⇒ CaseDef
+ type CaseFn = (Type, Int) ⇒ CaseDef
+
+ def mkProductCases(toRepr: ProductCaseFn, fromRepr: ProductCaseFn): (List[CaseDef], List[CaseDef]) =
+ (List(toRepr(fromTpe)), List(fromRepr(fromTpe)))
+
+ def mkCases(toRepr: CaseFn, fromRepr: CaseFn): (List[CaseDef], List[CaseDef]) = {
+ val to = fromCtors zip (Stream from 0) map toRepr.tupled
+ val from = fromCtors zip (Stream from 0) map fromRepr.tupled
+ val redundantCatchAllCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, absurdValueTree)
+ (to, from :+ redundantCatchAllCase)
+ }
+
+ def mkTrans(name: TermName, inputTpe: Type, outputTpe: Type, cases: List[CaseDef]): Tree = {
+ val param = newTermName(c.fresh("param"))
+
+ DefDef(
+ Modifiers(),
+ name,
+ List(),
+ List(List(ValDef(Modifiers(PARAM), param, TypeTree(inputTpe), EmptyTree))),
+ TypeTree(outputTpe),
+ Match(Ident(param), cases))
+ }
+
+ def mkCoproductValue(tree: Tree, index: Int): Tree = {
+ val inl = Apply(inlValueTree, List(tree))
+ (0 until index).foldLeft(inl: Tree) {
+ case (acc, _) ⇒
+ Apply(inrValueTree, List(acc))
+ }
+ }
+
+ def mkToCoproductCase(tpe: Type, index: Int): CaseDef = {
+ val name = newTermName(c.fresh("pat"))
+ CaseDef(
+ Bind(name, Typed(Ident(nme.WILDCARD), TypeTree(tpe))),
+ EmptyTree,
+ mkCoproductValue(mkElem(Ident(name), nameOf(tpe), tpe), index))
+ }
+
+ def mkFromCoproductCase(tpe: Type, index: Int): CaseDef = {
+ val name = newTermName(c.fresh("pat"))
+ CaseDef(
+ mkCoproductValue(Bind(name, Ident(nme.WILDCARD)), index),
+ EmptyTree,
+ Ident(name))
+ }
+
+ def mkBinder(boundName: Name, name: Name, tpe: Type) = Bind(boundName, Ident(nme.WILDCARD))
+ def mkValue(boundName: Name, name: Name, tpe: Type) = mkElem(Ident(boundName), name, tpe)
+
+ def mkTransCase(
+ tpe: Type,
+ bindFrom: (Name, Name, Type) ⇒ Tree,
+ bindRepr: (Name, Name, Type) ⇒ Tree)(mkCaseDef: (Tree, Tree) ⇒ CaseDef): CaseDef = {
+ val boundFields = fieldsOf(tpe).map { case (name, tpe) ⇒ (newTermName(c.fresh("pat")), name, tpe) }
+
+ val fromTree =
+ if (tpe =:= unitTpe) unitValueTree
+ else Apply(Ident(tpe.typeSymbol.companionSymbol.asTerm), boundFields.map(bindFrom.tupled))
+
+ val reprTree =
+ boundFields.foldRight(hnilValueTree) {
+ case (bf, acc) ⇒ Apply(hconsValueTree, List(bindRepr.tupled(bf), acc))
+ }
+
+ mkCaseDef(fromTree, reprTree)
+ }
+
+ def mkToProductReprCase(tpe: Type): CaseDef =
+ mkTransCase(tpe, mkBinder, mkValue) { case (lhs, rhs) ⇒ CaseDef(lhs, EmptyTree, rhs) }
+
+ def mkFromProductReprCase(tpe: Type): CaseDef =
+ mkTransCase(tpe, mkValue, mkBinder) { case (rhs, lhs) ⇒ CaseDef(lhs, EmptyTree, rhs) }
+
+ def mkToReprCase(tpe: Type, index: Int): CaseDef =
+ mkTransCase(tpe, mkBinder, mkValue) {
+ case (lhs, rhs) ⇒
+ CaseDef(lhs, EmptyTree, mkCoproductValue(mkElem(rhs, nameOf(tpe), tpe), index))
+ }
+
+ def mkFromReprCase(tpe: Type, index: Int): CaseDef =
+ mkTransCase(tpe, mkValue, mkBinder) {
+ case (rhs, lhs) ⇒
+ CaseDef(mkCoproductValue(lhs, index), EmptyTree, rhs)
+ }
+
+ def materializeGeneric = {
+ val genericTypeConstructor: Type = if (toLabelled) labelledGenericTpe else genericTpe
+
+ val reprTpe =
+ if (fromProduct) reprOf(fromTpe)
+ else if (toLabelled) {
+ val labelledCases = fromCtors.map(tpe ⇒ (nameOf(tpe), tpe))
+ mkUnionTpe(labelledCases)
+ } else
+ mkCoproductTpe(fromCtors)
+
+ val (toCases, fromCases) =
+ if (fromProduct) mkProductCases(mkToProductReprCase, mkFromProductReprCase)
+ else mkCases(mkToCoproductCase, mkFromCoproductCase)
+
+ mkClass(
+ appliedType(
+ genericTypeConstructor,
+ List(fromTpe)),
+ List(
+ TypeDef(Modifiers(), reprName, List(), TypeTree(reprTpe)),
+ mkTrans(toName, fromTpe, reprTpe, toCases),
+ mkTrans(fromName, reprTpe, fromTpe, fromCases)))
+ }
+
+ def materializeIdentityGeneric = {
+ def mkIdentityDef(name: TermName) = {
+ val param = newTermName("t")
+ DefDef(
+ Modifiers(),
+ name,
+ List(),
+ List(List(ValDef(Modifiers(PARAM), param, TypeTree(fromTpe), EmptyTree))),
+ TypeTree(fromTpe),
+ Ident(param))
+ }
+
+ mkClass(
+ appliedType(genericTpe, List(fromTpe)),
+ List(
+ TypeDef(Modifiers(), reprName, List(), TypeTree(fromTpe)),
+ mkIdentityDef(toName),
+ mkIdentityDef(fromName)))
+ }
+
+ def deriveInstance(deriver: Tree, tc: Type): Tree = {
+ fromSym.baseClasses.find(sym ⇒ sym != fromSym && sym.isClass && sym.asClass.isSealed) match {
+ case Some(sym) if c.inferImplicitValue(deriveCtorsTpe) == EmptyTree ⇒
+ val msg =
+ s"Attempting to derive a type class instance for class `${fromSym.name.decoded}` with " +
+ s"sealed superclass `${sym.name.decoded}`; this is most likely unintended. To silence " +
+ s"this warning, import `TypeClass.deriveConstructors`"
+
+ if (c.compilerSettings contains "-Xfatal-warnings")
+ c.error(c.enclosingPosition, msg)
+ else
+ c.warning(c.enclosingPosition, msg)
+ case _ ⇒
+ }
+
+ def mkImplicitlyAndAssign(name: TermName, typ: Type): ValDef = {
+ def mkImplicitly(typ: Type): Tree =
+ TypeApply(
+ Select(Ident(definitions.PredefModule), newTermName("implicitly")),
+ List(TypeTree(typ)))
+
+ ValDef(
+ Modifiers(LAZY),
+ name,
+ TypeTree(typ),
+ mkImplicitly(typ))
+ }
+
+ val elemTpes: List[Type] = fromCtors.flatMap(fieldsOf(_).map(_._2)).filterNot(fromTpe =:= _).distinct
+ val elemInstanceNames = List.fill(elemTpes.length)(newTermName(c.fresh("inst")))
+ val elemInstanceMap = (elemTpes zip elemInstanceNames).toMap
+ val elemInstanceDecls = (elemInstanceMap map {
+ case (tpe, name) ⇒
+ mkImplicitlyAndAssign(name, appliedType(tc, List(tpe)))
+ }).toList
+
+ val tpeInstanceName = newTermName(c.fresh())
+ val instanceMap = elemInstanceMap.mapValues(Ident(_)) + (fromTpe -> Ident(tpeInstanceName))
+
+ val reprInstance = {
+ val emptyProduct: Tree = Select(deriver, newTermName("emptyProduct"))
+ val product: Tree = Select(deriver, newTermName("product"))
+
+ val emptyCoproduct: Tree = Select(deriver, newTermName("emptyCoproduct"))
+ val coproduct: Tree = Select(deriver, newTermName("coproduct"))
+
+ def mkCompoundValue(nil: Tree, cons: Tree, items: List[(Name, Tree)]): Tree =
+ items.foldRight(nil) {
+ case ((name, instance), acc) ⇒
+ Apply(
+ cons,
+ (if (toLabelled) List(nameAsLiteral(name)) else Nil) ++ List(instance, acc))
+ }
+
+ def mkInstance(tpe: Type): Tree =
+ mkCompoundValue(
+ emptyProduct, product,
+ fieldsOf(tpe).map { case (name, tpe) ⇒ (name, instanceMap(tpe)) })
+
+ if (toProduct)
+ mkInstance(fromTpe)
+ else
+ mkCompoundValue(
+ emptyCoproduct, coproduct,
+ fromCtors.map { tpe ⇒ (tpe.typeSymbol.name, mkInstance(tpe)) })
+ }
+
+ val reprTpe =
+ if (toProduct)
+ reprOf(fromTpe)
+ else
+ mkCoproductTpe(fromCtors.map(reprOf))
+
+ val reprName = newTermName(c.fresh("inst"))
+ val reprInstanceDecl =
+ ValDef(
+ Modifiers(LAZY),
+ reprName,
+ TypeTree(appliedType(tc, List(reprTpe))),
+ reprInstance)
+
+ val toName, fromName = newTermName(c.fresh())
+
+ val tpeInstanceDecl =
+ ValDef(
+ Modifiers(LAZY),
+ tpeInstanceName,
+ TypeTree(appliedType(tc, List(fromTpe))),
+ Apply(
+ Select(deriver, newTermName("project")),
+ List(Ident(reprName), Ident(toName), Ident(fromName))))
+
+ val instanceDecls = elemInstanceDecls ::: List(reprInstanceDecl, tpeInstanceDecl)
+
+ val (toCases, fromCases) =
+ if (toProduct) mkProductCases(mkToProductReprCase, mkFromProductReprCase)
+ else mkCases(mkToReprCase, mkFromReprCase)
+
+ mkObjectSelection(
+ mkTrans(toName, fromTpe, reprTpe, toCases) :: mkTrans(fromName, reprTpe, fromTpe, fromCases) :: instanceDecls,
+ tpeInstanceName)
+ }
+ }
+}
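The macro above materializes `Generic`/`LabelledGeneric` instances for case classes (products) and sealed hierarchies (coproducts), mapping values to an `HList` or `Coproduct` representation and back. A minimal sketch of the product case, using a hypothetical `Point` case class that is not part of this patch:

    import akka.shapeless._

    case class Point(x: Int, y: Int)

    val gen  = Generic[Point]          // Repr is Point's HList representation: Int :: Int :: HNil
    val repr = gen.to(Point(1, 2))     // 1 :: 2 :: HNil
    val back = gen.from(repr)          // Point(1, 2)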
diff --git a/akka-parsing/src/main/scala/akka/shapeless/hlistconstraints.scala b/akka-parsing/src/main/scala/akka/shapeless/hlistconstraints.scala
new file mode 100644
index 0000000000..f1dca29eb9
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/hlistconstraints.scala
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2011 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import ops.hlist.Selector
+
+/**
+ * Type class witnessing that every element of `L` has `TC` as its outer type constructor.
+ */
+trait UnaryTCConstraint[L <: HList, TC[_]]
+
+object UnaryTCConstraint {
+ type *->*[TC[_]] = {
+ type λ[L <: HList] = UnaryTCConstraint[L, TC]
+ }
+
+ implicit def hnilUnaryTC[TC[_]] = new UnaryTCConstraint[HNil, TC] {}
+ implicit def hlistUnaryTC1[H, T <: HList, TC[_]](implicit utct: UnaryTCConstraint[T, TC]) =
+ new UnaryTCConstraint[TC[H]:: T, TC] {}
+
+ implicit def hlistUnaryTC2[L <: HList] = new UnaryTCConstraint[L, Id] {}
+
+ implicit def hlistUnaryTC3[H] = new UnaryTCConstraint[HNil, Const[H]#λ] {}
+ implicit def hlistUnaryTC4[H, T <: HList](implicit utct: UnaryTCConstraint[T, Const[H]#λ]) =
+ new UnaryTCConstraint[H :: T, Const[H]#λ] {}
+}
+
+/**
+ * Type class witnessing that every element of `L` is an element of `M`.
+ */
+trait BasisConstraint[L <: HList, M <: HList]
+
+object BasisConstraint {
+ type Basis[M <: HList] = {
+ type λ[L <: HList] = BasisConstraint[L, M]
+ }
+
+ implicit def hnilBasis[M <: HList] = new BasisConstraint[HNil, M] {}
+ implicit def hlistBasis[H, T <: HList, M <: HList](implicit bct: BasisConstraint[T, M], sel: Selector[M, H]) =
+ new BasisConstraint[H :: T, M] {}
+}
+
+/**
+ * Type class witnessing that every element of `L` is a subtype of `B`.
+ */
+trait LUBConstraint[L <: HList, B]
+
+object LUBConstraint {
+ type <<:[B] = {
+ type λ[L <: HList] = LUBConstraint[L, B]
+ }
+
+ implicit def hnilLUB[T] = new LUBConstraint[HNil, T] {}
+ implicit def hlistLUB[H, T <: HList, B](implicit bct: LUBConstraint[T, B], ev: H <:< B) =
+ new LUBConstraint[H :: T, B] {}
+}
+
+/**
+ * Type class witnessing that every element of `L` is of the form `FieldType[K, V]` where `K` is an element of `M`.
+ */
+trait KeyConstraint[L <: HList, M <: HList]
+
+object KeyConstraint {
+ import record._
+
+ type Keys[M <: HList] = {
+ type λ[L <: HList] = KeyConstraint[L, M]
+ }
+
+ implicit def hnilKeys[M <: HList] = new KeyConstraint[HNil, M] {}
+ implicit def hlistKeys[K, V, T <: HList, M <: HList](implicit bct: KeyConstraint[T, M], sel: Selector[M, K]) = new KeyConstraint[FieldType[K, V]:: T, M] {}
+}
+
+/**
+ * Type class witnessing that every element of `L` is of the form `FieldType[K, V]` where `V` is an element of `M`.
+ */
+trait ValueConstraint[L <: HList, M <: HList]
+
+object ValueConstraint {
+ import record._
+
+ type Values[M <: HList] = {
+ type λ[L <: HList] = ValueConstraint[L, M]
+ }
+
+ implicit def hnilValues[M <: HList] = new ValueConstraint[HNil, M] {}
+ implicit def hlistValues[K, V, T <: HList, M <: HList](implicit bct: ValueConstraint[T, M], sel: Selector[M, V]) = new ValueConstraint[FieldType[K, V]:: T, M] {}
+}
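These constraints are meant to be used through their type aliases (`*->*`, `Basis`, `<<:`, `Keys`, `Values`) as context bounds that restrict the shape of an `HList` argument. A sketch with `LUBConstraint`, using a hypothetical method that is not part of this patch:

    import akka.shapeless._
    import LUBConstraint._

    // accepts only HLists whose elements are all subtypes of String
    def joinAll[L <: HList: <<:[String]#λ](l: L): Unit = ()

    joinAll("a" :: "b" :: HNil)     // compiles
    // joinAll(1 :: "b" :: HNil)    // rejected: Int is not <: String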
diff --git a/akka-parsing/src/main/scala/akka/shapeless/hlists.scala b/akka-parsing/src/main/scala/akka/shapeless/hlists.scala
new file mode 100644
index 0000000000..799fb7a0fa
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/hlists.scala
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import scala.annotation.tailrec
+
+/**
+ * `HList` ADT base trait.
+ *
+ * @author Miles Sabin
+ */
+sealed trait HList
+
+/**
+ * Non-empty `HList` element type.
+ *
+ * @author Miles Sabin
+ */
+final case class ::[+H, +T <: HList](head: H, tail: T) extends HList {
+ override def toString = head + " :: " + tail.toString
+}
+
+/**
+ * Empty `HList` element type.
+ *
+ * @author Miles Sabin
+ */
+sealed trait HNil extends HList {
+ def ::[H](h: H) = akka.shapeless.::(h, this)
+ override def toString = "HNil"
+}
+
+/**
+ * Empty `HList` value.
+ *
+ * @author Miles Sabin
+ */
+case object HNil extends HNil
+
+object HList {
+ import syntax.HListOps
+
+ def apply() = HNil
+
+ def apply[P <: Product, L <: HList](p: P)(implicit gen: Generic.Aux[P, L]): L = gen.to(p)
+
+ implicit def hlistOps[L <: HList](l: L): HListOps[L] = new HListOps(l)
+
+ /**
+ * Convenience aliases for HList :: and List :: allowing them to be used together within match expressions.
+ */
+ object ListCompat {
+ val :: = scala.collection.immutable.::
+ val #: = akka.shapeless.::
+ }
+}
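`::` and `HNil` above are the building blocks for the value stacks used throughout the parser. Construction and pattern matching look like this (the `::` method on non-empty lists comes from the implicit `hlistOps` conversion in the companion object):

    import akka.shapeless._

    val l: Int :: String :: HNil = 1 :: "one" :: HNil

    l match {
      case i :: s :: HNil ⇒ println(s"$i -> $s")
    }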
diff --git a/akka-parsing/src/main/scala/akka/shapeless/hmap.scala b/akka-parsing/src/main/scala/akka/shapeless/hmap.scala
new file mode 100644
index 0000000000..4b262a420f
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/hmap.scala
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2011 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import poly._
+
+/**
+ * Heterogeneous map with type-level key/value associations that are fixed by an arbitrary
+ * relation `R`.
+ *
+ * `HMap`s extend `Poly` and hence are also polymorphic function values with type-specific
+ * cases corresponding to the map's type-level key/value associations.
+ */
+class HMap[R[_, _]](underlying: Map[Any, Any] = Map.empty) extends Poly1 {
+ def get[K, V](k: K)(implicit ev: R[K, V]): Option[V] = underlying.get(k).asInstanceOf[Option[V]]
+
+ def +[K, V](kv: (K, V))(implicit ev: R[K, V]): HMap[R] = new HMap[R](underlying + kv)
+ def -[K](k: K): HMap[R] = new HMap[R](underlying - k)
+
+ implicit def caseRel[K, V](implicit ev: R[K, V]) = Case1[this.type, K, V](get(_).get)
+}
+
+object HMap {
+ def apply[R[_, _]] = new HMapBuilder[R]
+
+ def empty[R[_, _]] = new HMap[R]
+ def empty[R[_, _]](underlying: Map[Any, Any]) = new HMap[R](underlying)
+}
+
+/**
+ * Type class witnessing the existence of a natural transformation between `K[_]` and `V[_]`.
+ *
+ * Use this trait to represent an `HMap` relation of the form `K[T]` maps to `V[T]`.
+ *
+ * @author Miles Sabin
+ */
+class ~?>[K[_], V[_]] {
+ class λ[K, V]
+}
+
+object ~?> {
+ implicit def rel[K[_], V[_]]: K ~?> V = new (K ~?> V)
+
+ implicit def witness[K[_], V[_], T](implicit rel: K ~?> V): rel.λ[K[T], V[T]] = new rel.λ[K[T], V[T]]
+}
+
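An `HMap` only accepts key/value pairs for which evidence of the relation `R` is in scope, and `get` recovers the statically related value type. A sketch with a hypothetical relation class (not part of this patch), assuming the `HMapBuilder.apply` overloads referenced above accept the key/value pairs:

    import akka.shapeless._

    class BiMapIS[K, V]                                 // hypothetical key/value relation
    implicit val intToString = new BiMapIS[Int, String]
    implicit val stringToInt = new BiMapIS[String, Int]

    val hm = HMap[BiMapIS](23 -> "foo", "bar" -> 13)    // only related pairs are accepted
    val s: Option[String] = hm.get(23)                  // Some("foo"), value type driven by the key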
diff --git a/akka-parsing/src/main/scala/akka/shapeless/lazy.scala b/akka-parsing/src/main/scala/akka/shapeless/lazy.scala
new file mode 100644
index 0000000000..4ddddde5d1
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/lazy.scala
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import scala.language.experimental.macros
+
+import scala.reflect.macros.Context
+
+trait Lazy[T] {
+ val value: T
+}
+
+object Lazy {
+ def apply[T](t: ⇒ T) = new Lazy[T] {
+ lazy val value = t
+ }
+
+ implicit def mkLazy[T]: Lazy[T] = macro mkLazyImpl[T]
+
+ def mkLazyImpl[T: c.WeakTypeTag](c: Context): c.Expr[Lazy[T]] = {
+ import c.universe._
+ import Flag._
+
+ val pendingSuperCall = Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())
+
+    val lazySym = c.mirror.staticClass("akka.shapeless.Lazy")
+
+ val thisLazyTypeTree =
+ AppliedTypeTree(
+ Ident(lazySym),
+ List(TypeTree(weakTypeOf[T])))
+
+ val recName = newTermName(c.fresh)
+ val className = newTypeName(c.fresh)
+ val recClass =
+ ClassDef(Modifiers(FINAL), className, List(),
+ Template(
+ List(thisLazyTypeTree),
+ emptyValDef,
+ List(
+ DefDef(
+ Modifiers(), nme.CONSTRUCTOR, List(),
+ List(List()),
+ TypeTree(),
+ Block(List(pendingSuperCall), Literal(Constant(())))),
+
+ // Implicit self-publication ties the knot
+ ValDef(Modifiers(IMPLICIT), recName, thisLazyTypeTree, This(tpnme.EMPTY)),
+
+ ValDef(Modifiers(LAZY), newTermName("value"), TypeTree(weakTypeOf[T]),
+ TypeApply(
+ Select(Ident(definitions.PredefModule), newTermName("implicitly")),
+ List(TypeTree(weakTypeOf[T])))))))
+
+ val block =
+ Block(
+ List(recClass),
+ Apply(Select(New(Ident(className)), nme.CONSTRUCTOR), List()))
+
+ c.Expr[Lazy[T]] { block }
+ }
+}
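+
+// A minimal usage sketch, assuming only the definitions above: `Lazy.apply` defers evaluation
+// of its argument until `value` is first accessed, and the `mkLazy` macro materializes a
+// `Lazy[T]` that resolves the underlying implicit `T` on demand, which lets implicit
+// resolution over recursive structures terminate.
+//
+//   val l = Lazy { println("computing"); 21 * 2 }
+//   l.value   // prints "computing", yields 42
+//   l.value   // cached, yields 42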
diff --git a/akka-parsing/src/main/scala/akka/shapeless/lenses.scala b/akka-parsing/src/main/scala/akka/shapeless/lenses.scala
new file mode 100644
index 0000000000..0ed5495dbd
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/lenses.scala
@@ -0,0 +1,156 @@
+/*
+ * Copyright (c) 2012-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import ops.hlist.{
+ At,
+ Init,
+ Last,
+ Prepend,
+ Selector,
+ ReplaceAt,
+ Replacer,
+ Tupler
+}
+import ops.record.{ Selector ⇒ RSelector, Updater }
+import record.{ FieldType, field }
+
+trait Lens[C, F] {
+ outer ⇒
+
+ type Gen[O] = LabelledGeneric.Aux[F, O]
+
+ def get(c: C): F
+ def set(c: C)(f: F): C
+ def modify(c: C)(f: F ⇒ F): C = set(c)(f(get(c)))
+
+ def compose[D](g: Lens[D, C]) = new Lens[D, F] {
+ def get(d: D): F = outer.get(g.get(d))
+ def set(d: D)(f: F): D = g.set(d)(outer.set(g.get(d))(f))
+ }
+
+ def >>[L <: HList](n: Nat)(implicit gen: Generic.Aux[F, L], lens: HListNthLens[L, n.N]) =
+ new Lens[C, lens.Elem] {
+ def get(c: C): lens.Elem = lens.get(gen.to(outer.get(c)))
+ def set(c: C)(f: lens.Elem) = outer.set(c)(gen.from(lens.set(gen.to(outer.get(c)))(f)))
+ }
+
+ def >>[Out0 <: HList: Gen, V](k: Witness)(implicit s: RSelector.Aux[Out0, k.T, V], u: Updater.Aux[Out0, FieldType[k.T, V], Out0]) =
+ new Lens[C, V] {
+ import akka.shapeless.syntax
+ val gen = implicitly[LabelledGeneric.Aux[F, Out0]]
+ def get(c: C): V = s(gen.to(outer.get(c)))
+ def set(c: C)(f: V): C = outer.set(c)(gen.from(record.recordOps(gen.to(outer.get(c))).updated(k, f)))
+ }
+
+ def ~[G](other: Lens[C, G]) = new ProductLens[C, (F, G)] {
+ def get(c: C): (F, G) = (outer.get(c), other.get(c))
+ def set(c: C)(fg: (F, G)) = other.set(outer.set(c)(fg._1))(fg._2)
+ }
+}
+
+trait ProductLens[C, P <: Product] extends Lens[C, P] {
+ outer ⇒
+ def ~[T, L <: HList, LT <: HList, Q <: Product, QL <: HList](other: Lens[C, T])(implicit genp: Generic.Aux[P, L],
+ tpp: Tupler.Aux[L, P],
+ pre: Prepend.Aux[L, T :: HNil, LT],
+ tpq: Tupler.Aux[LT, Q],
+ genq: Generic.Aux[Q, QL],
+ init: Init.Aux[QL, L],
+ last: Last.Aux[QL, T]) =
+ new ProductLens[C, Q] {
+ def get(c: C): Q = (genp.to(outer.get(c)) :+ other.get(c)).tupled
+ def set(c: C)(q: Q) = {
+ val l = genq.to(q)
+ other.set(outer.set(c)(l.init.tupled))(l.last)
+ }
+ }
+}
+
+object LensDefns {
+ def apply[C] = id[C]
+
+ object compose extends Poly2 {
+ implicit def default[A, B, C] = at[Lens[B, C], Lens[A, B]](_ compose _)
+ }
+
+ def id[C] = new Lens[C, C] {
+ def get(c: C): C = c
+ def set(c: C)(f: C): C = f
+ }
+
+ def setLens[E](e: E) = new Lens[Set[E], Boolean] {
+ def get(s: Set[E]) = s contains e
+ def set(s: Set[E])(b: Boolean) = if (b) s + e else s - e
+ }
+
+ def mapLens[K, V](k: K) = new Lens[Map[K, V], Option[V]] {
+ def get(m: Map[K, V]) = m get k
+ def set(m: Map[K, V])(ov: Option[V]) = ov match {
+ case Some(v) ⇒ m + (k -> v)
+ case None ⇒ m - k
+ }
+ }
+
+ /**
+ * The lens of an element of `L`, chosen by the element type, `U`.
+ */
+ def hlistSelectLens[L <: HList, U](implicit selector: Selector[L, U],
+ replacer: Replacer.Aux[L, U, U, (U, L)]): Lens[L, U] = new Lens[L, U] {
+ def get(l: L) = selector(l)
+ def set(l: L)(u: U) = replacer(l, u)._2
+ }
+
+ /**
+ * The lens of an element of the record type `L`, chosen by the
+ * singleton type of `k`.
+ */
+ def recordLens[L <: HList, U](k: Witness)(implicit selector: RSelector.Aux[L, k.T, U],
+ updater: Updater.Aux[L, FieldType[k.T, U], L]): Lens[L, U] = new Lens[L, U] {
+ def get(l: L) = selector(l)
+ def set(l: L)(u: U) = updater(l, field[k.T](u))
+ }
+
+ def hlistNthLens[L <: HList, N <: Nat](implicit lens: HListNthLens[L, N]) = lens.toLens
+}
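+
+// A minimal usage sketch of the concrete lenses above (names and values are illustrative):
+//
+//   val hasThree = LensDefns.setLens(3)
+//   hasThree.get(Set(1, 2, 3))          // true
+//   hasThree.set(Set(1, 2))(true)       // Set(1, 2, 3)
+//
+//   val atFoo = LensDefns.mapLens[String, Int]("foo")
+//   atFoo.get(Map("foo" -> 1))          // Some(1)
+//   atFoo.set(Map("foo" -> 1))(None)    // Map()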
+
+trait HListNthLens[L <: HList, N <: Nat] {
+ type Elem
+ def get(l: L): Elem
+ def set(l: L)(e: Elem): L
+ def toLens: Lens[L, Elem]
+}
+
+object HListNthLens {
+ implicit def hlistNthLens[L <: HList, N <: Nat, E](implicit lens: HListNthLensAux[L, N, E]) =
+ new HListNthLens[L, N] {
+ type Elem = E
+ def get(l: L): Elem = lens.get(l)
+ def set(l: L)(e: Elem): L = lens.set(l)(e)
+ def toLens: Lens[L, Elem] = lens
+ }
+}
+
+trait HListNthLensAux[L <: HList, N <: Nat, E] extends Lens[L, E]
+
+object HListNthLensAux {
+ implicit def hlistNthLens[L <: HList, N <: Nat, E](implicit atx: At.Aux[L, N, E], replace: ReplaceAt.Aux[L, N, E, (E, L)]) =
+ new HListNthLensAux[L, N, E] {
+ def get(l: L): E = l[N]
+ def set(l: L)(e: E): L = l.updatedAt[N](e)
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/nat.scala b/akka-parsing/src/main/scala/akka/shapeless/nat.scala
new file mode 100644
index 0000000000..9b34b8eadb
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/nat.scala
@@ -0,0 +1,138 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import scala.language.experimental.macros
+
+import scala.reflect.macros.Context
+
+/**
+ * Base trait for type level natural numbers.
+ *
+ * @author Miles Sabin
+ */
+trait Nat {
+ type N <: Nat
+}
+
+/**
+ * Encoding of successor.
+ *
+ * @author Miles Sabin
+ */
+case class Succ[P <: Nat]() extends Nat {
+ type N = Succ[P]
+}
+
+/**
+ * Encoding of zero.
+ *
+ * @author Miles Sabin
+ */
+class _0 extends Nat {
+ type N = _0
+}
+
+/**
+ * Type level encoding of the natural numbers.
+ *
+ * @author Miles Sabin
+ */
+object Nat extends Nats {
+ import ops.nat._
+
+ def apply(i: Int): Nat = macro NatMacros.materializeWidened
+
+ /** The natural number 0 */
+ type _0 = akka.shapeless._0
+ val _0: _0 = new _0
+
+ def toInt[N <: Nat](implicit toIntN: ToInt[N]) = toIntN()
+
+ def toInt(n: Nat)(implicit toIntN: ToInt[n.N]) = toIntN()
+
+ implicit def materialize(i: Int): Nat = macro NatMacros.materializeSingleton
+}
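+
+// A minimal usage sketch, assuming only the definitions above plus `ops.nat.ToInt`: the
+// materializer macros lift integer literals to type-level naturals, and `toInt` maps back.
+//
+//   val two = Nat(2)            // type-level representation: Succ[Succ[_0]]
+//   Nat.toInt[Succ[Succ[_0]]]   // 2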
+
+object NatMacros {
+ def mkNatTpt(c: Context)(i: c.Expr[Int]): c.Tree = {
+ import c.universe._
+
+ val n = i.tree match {
+ case Literal(Constant(n: Int)) ⇒ n
+ case _ ⇒
+ c.abort(c.enclosingPosition, s"Expression ${i.tree} does not evaluate to an Int constant")
+ }
+
+ if (n < 0)
+ c.abort(c.enclosingPosition, s"A Nat cannot represent $n")
+
+ val succSym = typeOf[Succ[_]].typeConstructor.typeSymbol
+ val _0Sym = typeOf[_0].typeSymbol
+
+ def mkNatTpt(n: Int): Tree = {
+ if (n == 0) Ident(_0Sym)
+ else AppliedTypeTree(Ident(succSym), List(mkNatTpt(n - 1)))
+ }
+
+ mkNatTpt(n)
+ }
+
+ def materializeSingleton(c: Context)(i: c.Expr[Int]): c.Expr[Nat] = {
+ import c.universe._
+
+ val natTpt = mkNatTpt(c)(i)
+
+ val pendingSuperCall = Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())
+
+ val moduleName = newTermName(c.fresh("nat_"))
+ val moduleDef =
+ ModuleDef(Modifiers(), moduleName,
+ Template(
+ List(natTpt),
+ emptyValDef,
+ List(
+ DefDef(
+ Modifiers(), nme.CONSTRUCTOR, List(),
+ List(List()),
+ TypeTree(),
+ Block(List(pendingSuperCall), Literal(Constant(())))))))
+
+ c.Expr[Nat] {
+ Block(
+ List(moduleDef),
+ Ident(moduleName))
+ }
+ }
+
+ def materializeWidened(c: Context)(i: c.Expr[Int]): c.Expr[Nat] = {
+ import c.universe._
+ val natTpt = mkNatTpt(c)(i)
+
+ val valName = newTermName(c.fresh("nat_"))
+ val valDef =
+ ValDef(Modifiers(), valName,
+ natTpt,
+ Apply(Select(New(natTpt), nme.CONSTRUCTOR), List()))
+
+ c.Expr[Nat] {
+ Block(
+ List(valDef),
+ Ident(valName))
+ }
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/ops/coproduct.scala b/akka-parsing/src/main/scala/akka/shapeless/ops/coproduct.scala
new file mode 100644
index 0000000000..6ac6ae5377
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/ops/coproduct.scala
@@ -0,0 +1,133 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package ops
+
+import poly._
+
+object coproduct {
+ trait Inject[C <: Coproduct, I] {
+ def apply(i: I): C
+ }
+
+ object Inject {
+ def apply[C <: Coproduct, I](implicit inject: Inject[C, I]) = inject
+
+ implicit def tlInject[H, T <: Coproduct, I](implicit tlInj: Inject[T, I]): Inject[H :+: T, I] = new Inject[H :+: T, I] {
+ def apply(i: I): H :+: T = Inr(tlInj(i))
+ }
+
+ implicit def hdInject[H, T <: Coproduct]: Inject[H :+: T, H] = new Inject[H :+: T, H] {
+ def apply(i: H): H :+: T = Inl(i)
+ }
+ }
+
+ trait Selector[C <: Coproduct, T] {
+ def apply(c: C): Option[T]
+ }
+
+ object Selector {
+ def apply[C <: Coproduct, T](implicit select: Selector[C, T]) = select
+
+ implicit def tlSelector1[H, T <: Coproduct, S](implicit st: Selector[T, S]): Selector[H :+: T, S] = new Selector[H :+: T, S] {
+ def apply(c: H :+: T): Option[S] = c match {
+ case Inl(h) ⇒ None
+ case Inr(t) ⇒ st(t)
+ }
+ }
+
+ implicit def hdSelector[H, T <: Coproduct](implicit st: Selector[T, H] = null): Selector[H :+: T, H] = new Selector[H :+: T, H] {
+ def apply(c: H :+: T): Option[H] = c match {
+ case Inl(h) ⇒ Some(h)
+ case Inr(t) ⇒ if (st != null) st(t) else None
+ }
+ }
+ }
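+
+  // A minimal usage sketch for `Inject` and `Selector`, assuming only the definitions above
+  // (the alias `ISB` is illustrative):
+  //
+  //   type ISB = Int :+: String :+: Boolean :+: CNil
+  //   val c: ISB = Inject[ISB, String].apply("foo")   // Inr(Inl("foo"))
+  //   Selector[ISB, String].apply(c)                  // Some("foo")
+  //   Selector[ISB, Int].apply(c)                     // None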
+
+ trait Mapper[F <: Poly, C <: Coproduct] extends DepFn1[C] { type Out <: Coproduct }
+
+ object Mapper {
+ def apply[F <: Poly, C <: Coproduct](implicit mapper: Mapper[F, C]): Aux[F, C, mapper.Out] = mapper
+ def apply[C <: Coproduct](f: Poly)(implicit mapper: Mapper[f.type, C]): Aux[f.type, C, mapper.Out] = mapper
+
+ type Aux[F <: Poly, C <: Coproduct, Out0 <: Coproduct] = Mapper[F, C] { type Out = Out0 }
+
+ implicit def cnilMapper[F <: Poly]: Aux[F, CNil, CNil] = new Mapper[F, CNil] {
+ type Out = CNil
+ def apply(t: CNil): Out = t
+ }
+
+ implicit def cpMapper[F <: Poly, H, OutH, T <: Coproduct](implicit fh: Case1.Aux[F, H, OutH], mt: Mapper[F, T]): Aux[F, H :+: T, OutH :+: mt.Out] =
+ new Mapper[F, H :+: T] {
+ type Out = OutH :+: mt.Out
+ def apply(c: H :+: T): Out = c match {
+ case Inl(h) ⇒ Inl(fh(h))
+ case Inr(t) ⇒ Inr(mt(t))
+ }
+ }
+ }
+
+ trait Unifier[C <: Coproduct] extends DepFn1[C]
+
+ object Unifier {
+ def apply[C <: Coproduct](implicit unifier: Unifier[C]): Aux[C, unifier.Out] = unifier
+
+ type Aux[C <: Coproduct, Out0] = Unifier[C] { type Out = Out0 }
+
+ implicit def lstUnifier[H]: Aux[H :+: CNil, H] =
+ new Unifier[H :+: CNil] {
+ type Out = H
+ def apply(c: H :+: CNil): Out = (c: @unchecked) match {
+ case Inl(h) ⇒ h
+ }
+ }
+
+ implicit def cpUnifier[H1, H2, T <: Coproduct, TL, L, Out0 >: L](implicit u: Lub[H1, H2, L], lt: Aux[L :+: T, Out0]): Aux[H1 :+: H2 :+: T, Out0] =
+ new Unifier[H1 :+: H2 :+: T] {
+ type Out = Out0
+ def apply(c: H1 :+: H2 :+: T): Out = c match {
+ case Inl(h1) ⇒ u.left(h1)
+ case Inr(Inl(h2)) ⇒ u.right(h2)
+ case Inr(Inr(t)) ⇒ lt(Inr(t))
+ }
+ }
+ }
+
+ trait ZipWithKeys[K <: HList, V <: Coproduct] extends DepFn2[K, V] { type Out <: Coproduct }
+
+ object ZipWithKeys {
+ import akka.shapeless.record._
+
+ def apply[K <: HList, V <: Coproduct](implicit zipWithKeys: ZipWithKeys[K, V]): Aux[K, V, zipWithKeys.Out] = zipWithKeys
+
+ type Aux[K <: HList, V <: Coproduct, Out0 <: Coproduct] = ZipWithKeys[K, V] { type Out = Out0 }
+
+ implicit val cnilZipWithKeys: Aux[HNil, CNil, CNil] = new ZipWithKeys[HNil, CNil] {
+ type Out = CNil
+ def apply(k: HNil, v: CNil) = v
+ }
+
+ implicit def cpZipWithKeys[KH, VH, KT <: HList, VT <: Coproduct](implicit zipWithKeys: ZipWithKeys[KT, VT], wkh: Witness.Aux[KH]): Aux[KH :: KT, VH :+: VT, FieldType[KH, VH] :+: zipWithKeys.Out] =
+ new ZipWithKeys[KH :: KT, VH :+: VT] {
+ type Out = FieldType[KH, VH] :+: zipWithKeys.Out
+ def apply(k: KH :: KT, v: VH :+: VT): Out = v match {
+ case Inl(vh) ⇒ Inl(field[wkh.T](vh))
+ case Inr(vt) ⇒ Inr(zipWithKeys(k.tail, vt))
+ }
+ }
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/ops/functions.scala b/akka-parsing/src/main/scala/akka/shapeless/ops/functions.scala
new file mode 100644
index 0000000000..03905535f5
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/ops/functions.scala
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package ops
+
+object function {
+ /**
+ * Type class supporting conversion of arbitrary functions to functions of a single `HList`
+ * argument.
+ *
+ * @author Miles Sabin
+ */
+ trait FnToProduct[F] extends DepFn1[F]
+
+ object FnToProduct extends FnToProductInstances {
+ def apply[F](implicit fntop: FnToProduct[F]): Aux[F, fntop.Out] = fntop
+ }
+
+ /**
+ * Type class supporting conversion of functions of a single `HList` argument to ordinary functions.
+ *
+ * @author Miles Sabin
+ */
+ trait FnFromProduct[F] extends DepFn1[F]
+
+ object FnFromProduct extends FnFromProductInstances {
+ def apply[F](implicit fnfromp: FnFromProduct[F]): Aux[F, fnfromp.Out] = fnfromp
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/ops/hlists.scala b/akka-parsing/src/main/scala/akka/shapeless/ops/hlists.scala
new file mode 100644
index 0000000000..a3fbf83ce3
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/ops/hlists.scala
@@ -0,0 +1,1525 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package ops
+
+import scala.annotation.tailrec
+
+import poly._
+
+object hlist {
+ /**
+ * Type class witnessing that this `HList` is composite and providing access to head and tail.
+ *
+ * @author Miles Sabin
+ */
+ trait IsHCons[L <: HList] {
+ type H
+ type T <: HList
+
+ def head(l: L): H
+ def tail(l: L): T
+ }
+
+ object IsHCons {
+ def apply[L <: HList](implicit isHCons: IsHCons[L]): Aux[L, isHCons.H, isHCons.T] = isHCons
+
+ type Aux[L <: HList, H0, T0 <: HList] = IsHCons[L] { type H = H0; type T = T0 }
+ implicit def hlistIsHCons[H0, T0 <: HList]: Aux[H0 :: T0, H0, T0] =
+ new IsHCons[H0 :: T0] {
+ type H = H0
+ type T = T0
+
+ def head(l: H0 :: T0): H = l.head
+ def tail(l: H0 :: T0): T = l.tail
+ }
+ }
+
+ /**
+ * Type class witnessing that the result of wrapping each element of `HList` `L` in type constructor `F` is `Out`.
+ */
+ trait Mapped[L <: HList, F[_]] {
+ type Out <: HList
+ }
+
+ object Mapped {
+ def apply[L <: HList, F[_]](implicit mapped: Mapped[L, F]): Aux[L, F, mapped.Out] = mapped
+
+ type Aux[L <: HList, F[_], Out0 <: HList] = Mapped[L, F] { type Out = Out0 }
+
+ implicit def hnilMapped[F[_]]: Aux[HNil, F, HNil] = new Mapped[HNil, F] { type Out = HNil }
+
+ implicit def hlistIdMapped[L <: HList]: Aux[L, Id, L] = new Mapped[L, Id] { type Out = L }
+
+ implicit def hlistMapped1[H, T <: HList, F[_], OutM <: HList](implicit mt: Mapped.Aux[T, F, OutM]): Aux[H :: T, F, F[H] :: OutM] =
+ new Mapped[H :: T, F] { type Out = F[H] :: OutM }
+
+ implicit def hlistMapped2[H, T <: HList, F, OutM <: HList](implicit mt: Mapped.Aux[T, Const[F]#λ, OutM]): Aux[H :: T, Const[F]#λ, F :: OutM] =
+ new Mapped[H :: T, Const[F]#λ] { type Out = F :: OutM }
+ }
+
+ /**
+ * Type class witnessing that the result of stripping type constructor `F` off each element of `HList` `L` is `Out`.
+ */
+ trait Comapped[L <: HList, F[_]] {
+ type Out <: HList
+ }
+
+ trait LowPriorityComapped {
+ type Aux[L <: HList, F[_], Out0 <: HList] = Comapped[L, F] { type Out = Out0 }
+ implicit def hlistIdComapped[L <: HList]: Aux[L, Id, L] = new Comapped[L, Id] { type Out = L }
+ }
+
+ object Comapped extends LowPriorityComapped {
+ def apply[L <: HList, F[_]](implicit comapped: Comapped[L, F]): Aux[L, F, comapped.Out] = comapped
+
+ implicit def hnilComapped[F[_]]: Aux[HNil, F, HNil] = new Comapped[HNil, F] { type Out = HNil }
+
+ implicit def hlistComapped[H, T <: HList, F[_]](implicit mt: Comapped[T, F]): Aux[F[H] :: T, F, H :: mt.Out] =
+ new Comapped[F[H]:: T, F] { type Out = H :: mt.Out }
+ }
+
+ /**
+ * Type class witnessing that `HList`s `L1` and `L2` have elements of the form `F1[Ln]` and `F2[Ln]` respectively for all
+ * indices `n`. This implies that a natural transform `F1 ~> F2` will take a list of type `L1` onto a list of type `L2`.
+ *
+ * @author Miles Sabin
+ */
+ trait NatTRel[L1 <: HList, F1[_], L2 <: HList, F2[_]] {
+ def map(nt: F1 ~> F2, fa: L1): L2
+ }
+
+ object NatTRel {
+ def apply[L1 <: HList, F1[_], L2 <: HList, F2[_]](implicit natTRel: NatTRel[L1, F1, L2, F2]) = natTRel
+
+ implicit def hnilNatTRel1[F1[_], F2[_]] = new NatTRel[HNil, F1, HNil, F2] {
+ def map(f: F1 ~> F2, fa: HNil): HNil = HNil
+ }
+
+ implicit def hnilNatTRel2[F1[_], H2] = new NatTRel[HNil, F1, HNil, Const[H2]#λ] {
+ def map(f: F1 ~> Const[H2]#λ, fa: HNil): HNil = HNil
+ }
+
+ implicit def hlistNatTRel1[H, F1[_], F2[_], T1 <: HList, T2 <: HList](implicit nt: NatTRel[T1, F1, T2, F2]) =
+ new NatTRel[F1[H]:: T1, F1, F2[H]:: T2, F2] {
+ def map(f: F1 ~> F2, fa: F1[H] :: T1): F2[H] :: T2 = f(fa.head) :: nt.map(f, fa.tail)
+ }
+
+ implicit def hlistNatTRel2[H, F2[_], T1 <: HList, T2 <: HList](implicit nt: NatTRel[T1, Id, T2, F2]) =
+ new NatTRel[H :: T1, Id, F2[H]:: T2, F2] {
+ def map(f: Id ~> F2, fa: H :: T1): F2[H] :: T2 = f(fa.head) :: nt.map(f, fa.tail)
+ }
+
+ implicit def hlistNatTRel3[H, F1[_], T1 <: HList, T2 <: HList](implicit nt: NatTRel[T1, F1, T2, Id]) =
+ new NatTRel[F1[H]:: T1, F1, H :: T2, Id] {
+ def map(f: F1 ~> Id, fa: F1[H] :: T1): H :: T2 = f(fa.head) :: nt.map(f, fa.tail)
+ }
+
+ implicit def hlistNatTRel4[H1, F1[_], T1 <: HList, H2, T2 <: HList](implicit nt: NatTRel[T1, F1, T2, Const[H2]#λ]) =
+ new NatTRel[F1[H1]:: T1, F1, H2 :: T2, Const[H2]#λ] {
+ def map(f: F1 ~> Const[H2]#λ, fa: F1[H1] :: T1): H2 :: T2 = f(fa.head) :: nt.map(f, fa.tail)
+ }
+
+ implicit def hlistNatTRel5[H1, T1 <: HList, H2, T2 <: HList](implicit nt: NatTRel[T1, Id, T2, Const[H2]#λ]) =
+ new NatTRel[H1 :: T1, Id, H2 :: T2, Const[H2]#λ] {
+ def map(f: Id ~> Const[H2]#λ, fa: H1 :: T1): H2 :: T2 = f(fa.head) :: nt.map(f, fa.tail)
+ }
+ }
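+
+  // A minimal usage sketch, assuming only the definitions above: a natural transformation
+  // `F1 ~> F2` maps an HList whose elements are all `F1[_]`-shaped onto the corresponding
+  // `F2[_]`-shaped HList (`headOption` is illustrative).
+  //
+  //   object headOption extends (List ~> Option) {
+  //     def apply[T](l: List[T]): Option[T] = l.headOption
+  //   }
+  //   val nt = NatTRel[List[Int] :: List[String] :: HNil, List, Option[Int] :: Option[String] :: HNil, Option]
+  //   nt.map(headOption, List(1, 2) :: List("a") :: HNil)   // Some(1) :: Some("a") :: HNil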
+
+ /**
+ * Type class providing minimally witnessed operations on `HList`s which can be derived from `L` by wrapping
+ * each of its elements in a type constructor.
+ */
+ trait HKernel {
+ type L <: HList
+ type Mapped[G[_]] <: HList
+ type Length <: Nat
+
+ def map[F[_], G[_]](f: F ~> G, l: Mapped[F]): Mapped[G]
+
+ def tabulate[C](from: Int)(f: Int ⇒ C): Mapped[Const[C]#λ]
+
+ def toList[C](l: Mapped[Const[C]#λ]): List[C]
+
+ def length: Int
+ }
+
+ trait HNilHKernel extends HKernel {
+ type L = HNil
+ type Mapped[G[_]] = HNil
+ type Length = _0
+
+ def map[F[_], G[_]](f: F ~> G, l: HNil): HNil = HNil
+
+ def tabulate[C](from: Int)(f: Int ⇒ C): HNil = HNil
+
+ def toList[C](l: HNil): List[C] = Nil
+
+ def length: Int = 0
+ }
+
+ case object HNilHKernel extends HNilHKernel
+
+ final case class HConsHKernel[H, T <: HKernel](tail: T) extends HKernel {
+ type L = H :: tail.L
+ type Mapped[G[_]] = G[H] :: tail.Mapped[G]
+ type Length = Succ[tail.Length]
+
+ def map[F[_], G[_]](f: F ~> G, l: F[H] :: tail.Mapped[F]): G[H] :: tail.Mapped[G] = f(l.head) :: tail.map(f, l.tail)
+
+ def tabulate[C](from: Int)(f: Int ⇒ C): C :: tail.Mapped[Const[C]#λ] = f(from) :: tail.tabulate(from + 1)(f)
+
+ def toList[C](l: C :: tail.Mapped[Const[C]#λ]): List[C] = l.head :: tail.toList(l.tail)
+
+ def length: Int = 1 + tail.length
+ }
+
+ object HKernel {
+ def apply[L <: HList](implicit mk: HKernelAux[L]): mk.Out = mk()
+ def apply[L <: HList](l: L)(implicit mk: HKernelAux[L]): mk.Out = mk()
+ }
+
+ trait HKernelAux[L <: HList] {
+ type Out <: HKernel
+ def apply(): Out
+ }
+
+ object HKernelAux {
+ implicit def mkHNilHKernel = new HKernelAux[HNil] {
+ type Out = HNilHKernel
+ def apply() = HNilHKernel
+ }
+
+ implicit def mkHListHKernel[H, T <: HList](implicit ct: HKernelAux[T]) = new HKernelAux[H :: T] {
+ type Out = HConsHKernel[H, ct.Out]
+ def apply() = HConsHKernel[H, ct.Out](ct())
+ }
+ }
+
+ /**
+ * Type class computing the coproduct type corresponding to this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait Union[L <: HList] { type Out <: Coproduct }
+
+ object Union {
+ def apply[L <: HList](implicit union: Union[L]): Aux[L, union.Out] = union
+
+ type Aux[L <: HList, Out0 <: Coproduct] = Union[L] { type Out = Out0 }
+
+ implicit def hnilUnion[H]: Aux[HNil, CNil] =
+ new Union[HNil] {
+ type Out = CNil
+ }
+
+ implicit def hlistUnion[H, T <: HList](implicit ut: Union[T]): Aux[H :: T, H :+: ut.Out] =
+ new Union[H :: T] {
+ type Out = H :+: ut.Out
+ }
+ }
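+
+  // For example (a sketch, assuming only the definitions above), the coproduct corresponding
+  // to `Int :: String :: HNil` is `Int :+: String :+: CNil`:
+  //
+  //   implicitly[Union.Aux[Int :: String :: HNil, Int :+: String :+: CNil]]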
+
+ /**
+ * Type class supporting computing the type-level Nat corresponding to the length of this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait Length[L <: HList] extends DepFn0 { type Out <: Nat }
+
+ object Length {
+ def apply[L <: HList](implicit length: Length[L]): Aux[L, length.Out] = length
+
+ import akka.shapeless.nat._
+ type Aux[L <: HList, N <: Nat] = Length[L] { type Out = N }
+ implicit def hnilLength: Aux[HNil, _0] = new Length[HNil] {
+ type Out = _0
+ def apply(): Out = _0
+ }
+
+ implicit def hlistLength[H, T <: HList, N <: Nat](implicit lt: Aux[T, N], sn: Witness.Aux[Succ[N]]): Aux[H :: T, Succ[N]] = new Length[H :: T] {
+ type Out = Succ[N]
+ def apply(): Out = sn.value
+ }
+ }
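+
+  // For example (a sketch, assuming only the definitions above):
+  //
+  //   val len = Length[Int :: String :: HNil]
+  //   // len.Out is Succ[Succ[_0]], so Nat.toInt[len.Out] == 2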
+
+ /**
+ * Type class supporting mapping a higher ranked function over this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait Mapper[HF, In <: HList] extends DepFn1[In] { type Out <: HList }
+
+ object Mapper {
+ def apply[F, L <: HList](implicit mapper: Mapper[F, L]): Aux[F, L, mapper.Out] = mapper
+
+ type Aux[HF, In <: HList, Out0 <: HList] = Mapper[HF, In] { type Out = Out0 }
+
+ implicit def hnilMapper1[HF]: Aux[HF, HNil, HNil] =
+ new Mapper[HF, HNil] {
+ type Out = HNil
+ def apply(l: HNil): Out = HNil
+ }
+
+ implicit def hlistMapper1[HF <: Poly, InH, InT <: HList](implicit hc: Case1[HF, InH], mt: Mapper[HF, InT]): Aux[HF, InH :: InT, hc.Result :: mt.Out] =
+ new Mapper[HF, InH :: InT] {
+ type Out = hc.Result :: mt.Out
+ def apply(l: InH :: InT): Out = hc(l.head) :: mt(l.tail)
+ }
+ }
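+
+  // A minimal usage sketch, assuming only the definitions above (`size` is an illustrative Poly1):
+  //
+  //   object size extends Poly1 {
+  //     implicit def caseInt = at[Int](_ ⇒ 1)
+  //     implicit def caseString = at[String](_.length)
+  //   }
+  //   val m = Mapper[size.type, Int :: String :: HNil]
+  //   m(23 :: "foo" :: HNil)   // 1 :: 3 :: HNil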
+
+ /**
+ * Type class supporting flatmapping a higher ranked function over this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait FlatMapper[HF, In <: HList] extends DepFn1[In] { type Out <: HList }
+
+ object FlatMapper {
+ def apply[F, L <: HList](implicit mapper: FlatMapper[F, L]): Aux[F, L, mapper.Out] = mapper
+
+ type Aux[HF, In <: HList, Out0 <: HList] = FlatMapper[HF, In] { type Out = Out0 }
+
+ implicit def hnilFlatMapper1[HF]: Aux[HF, HNil, HNil] =
+ new FlatMapper[HF, HNil] {
+ type Out = HNil
+ def apply(l: HNil): Out = HNil
+ }
+
+ implicit def hlistFlatMapper1[HF <: Poly, InH, OutH <: HList, InT <: HList, OutT <: HList, Out0 <: HList](implicit hc: Case1.Aux[HF, InH, OutH],
+ mt: FlatMapper.Aux[HF, InT, OutT],
+ prepend: Prepend.Aux[OutH, OutT, Out0]): Aux[HF, InH :: InT, Out0] =
+ new FlatMapper[HF, InH :: InT] {
+ type Out = Out0
+ def apply(l: InH :: InT): Out = prepend(hc(l.head), mt(l.tail))
+ }
+ }
+
+ /**
+ * Type class supporting mapping a constant valued function over this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait ConstMapper[C, L <: HList] extends DepFn2[C, L] { type Out <: HList }
+
+ object ConstMapper {
+ def apply[C, L <: HList](implicit mapper: ConstMapper[C, L]): Aux[C, L, mapper.Out] = mapper
+
+ type Aux[C, L <: HList, Out0 <: HList] = ConstMapper[C, L] { type Out = Out0 }
+
+ implicit def hnilConstMapper[C]: Aux[C, HNil, HNil] =
+ new ConstMapper[C, HNil] {
+ type Out = HNil
+ def apply(c: C, l: HNil): Out = l
+ }
+
+ implicit def hlistConstMapper[H, T <: HList, C](implicit mct: ConstMapper[C, T]): Aux[C, H :: T, C :: mct.Out] =
+ new ConstMapper[C, H :: T] {
+ type Out = C :: mct.Out
+ def apply(c: C, l: H :: T): Out = c :: mct(c, l.tail)
+ }
+ }
+
+ /**
+ * Type class supporting mapping a polymorphic function over this `HList` and then folding the result using a
+ * monomorphic function value.
+ *
+ * @author Miles Sabin
+ */
+ trait MapFolder[L <: HList, R, HF] {
+ def apply(l: L, in: R, op: (R, R) ⇒ R): R
+ }
+
+ object MapFolder {
+ def apply[L <: HList, R, F](implicit folder: MapFolder[L, R, F]) = folder
+
+ implicit def hnilMapFolder[R, HF]: MapFolder[HNil, R, HF] = new MapFolder[HNil, R, HF] {
+ def apply(l: HNil, in: R, op: (R, R) ⇒ R): R = in
+ }
+
+ implicit def hlistMapFolder[H, T <: HList, R, HF <: Poly](implicit hc: Case1.Aux[HF, H, R], tf: MapFolder[T, R, HF]): MapFolder[H :: T, R, HF] =
+ new MapFolder[H :: T, R, HF] {
+ def apply(l: H :: T, in: R, op: (R, R) ⇒ R): R = op(hc(l.head), tf(l.tail, in, op))
+ }
+ }
+
+ /**
+ * Type class supporting left-folding a polymorphic binary function over this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait LeftFolder[L <: HList, In, HF] extends DepFn2[L, In]
+
+ object LeftFolder {
+ def apply[L <: HList, In, F](implicit folder: LeftFolder[L, In, F]): Aux[L, In, F, folder.Out] = folder
+
+ type Aux[L <: HList, In, HF, Out0] = LeftFolder[L, In, HF] { type Out = Out0 }
+
+ implicit def hnilLeftFolder[In, HF]: Aux[HNil, In, HF, In] =
+ new LeftFolder[HNil, In, HF] {
+ type Out = In
+ def apply(l: HNil, in: In): Out = in
+ }
+
+ implicit def hlistLeftFolder[H, T <: HList, In, HF, OutH](implicit f: Case2.Aux[HF, In, H, OutH], ft: LeftFolder[T, OutH, HF]): Aux[H :: T, In, HF, ft.Out] =
+ new LeftFolder[H :: T, In, HF] {
+ type Out = ft.Out
+ def apply(l: H :: T, in: In): Out = ft(l.tail, f(in, l.head))
+ }
+ }
+
+ /**
+ * Type class supporting right-folding a polymorphic binary function over this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait RightFolder[L <: HList, In, HF] extends DepFn2[L, In]
+
+ object RightFolder {
+ def apply[L <: HList, In, F](implicit folder: RightFolder[L, In, F]): Aux[L, In, F, folder.Out] = folder
+
+ type Aux[L <: HList, In, HF, Out0] = RightFolder[L, In, HF] { type Out = Out0 }
+
+ implicit def hnilRightFolder[In, HF]: Aux[HNil, In, HF, In] =
+ new RightFolder[HNil, In, HF] {
+ type Out = In
+ def apply(l: HNil, in: In): Out = in
+ }
+
+ implicit def hlistRightFolder[H, T <: HList, In, HF, OutT](implicit ft: RightFolder.Aux[T, In, HF, OutT], f: Case2[HF, H, OutT]): Aux[H :: T, In, HF, f.Result] =
+ new RightFolder[H :: T, In, HF] {
+ type Out = f.Result
+ def apply(l: H :: T, in: In): Out = f(l.head, ft(l.tail, in))
+ }
+ }
+
+ /**
+ * Type class supporting left-reducing a polymorphic binary function over this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait LeftReducer[L <: HList, HF] extends DepFn1[L]
+
+ object LeftReducer {
+ def apply[L <: HList, F](implicit reducer: LeftReducer[L, F]): Aux[L, F, reducer.Out] = reducer
+
+ type Aux[L <: HList, HF, Out0] = LeftReducer[L, HF] { type Out = Out0 }
+ implicit def leftReducer[H, T <: HList, HF](implicit folder: LeftFolder[T, H, HF]): Aux[H :: T, HF, folder.Out] =
+ new LeftReducer[H :: T, HF] {
+ type Out = folder.Out
+ def apply(l: H :: T): Out = folder.apply(l.tail, l.head)
+ }
+ }
+
+ /**
+ * Type class supporting right-reducing a polymorphic binary function over this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait RightReducer[L <: HList, HF] extends DepFn1[L]
+
+ object RightReducer {
+ def apply[L <: HList, F](implicit reducer: RightReducer[L, F]): Aux[L, F, reducer.Out] = reducer
+
+ type Aux[L <: HList, HF, Out0] = RightReducer[L, HF] { type Out = Out0 }
+
+ implicit def hsingleRightReducer[H, HF]: Aux[H :: HNil, HF, H] =
+ new RightReducer[H :: HNil, HF] {
+ type Out = H
+ def apply(l: H :: HNil): Out = l.head
+ }
+
+ implicit def hlistRightReducer[H, T <: HList, HF, OutT](implicit rt: RightReducer.Aux[T, HF, OutT], f: Case2[HF, H, OutT]): Aux[H :: T, HF, f.Result] =
+ new RightReducer[H :: T, HF] {
+ type Out = f.Result
+ def apply(l: H :: T): Out = f(l.head, rt(l.tail))
+ }
+ }
+
+ /**
+ * Type class supporting unification of this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait Unifier[L <: HList] extends DepFn1[L] { type Out <: HList }
+
+ object Unifier {
+ def apply[L <: HList](implicit unifier: Unifier[L]): Aux[L, unifier.Out] = unifier
+
+ type Aux[L <: HList, Out0 <: HList] = Unifier[L] { type Out = Out0 }
+
+ implicit val hnilUnifier: Aux[HNil, HNil] = new Unifier[HNil] {
+ type Out = HNil
+ def apply(l: HNil): Out = l
+ }
+
+ implicit def hsingleUnifier[T]: Aux[T :: HNil, T :: HNil] =
+ new Unifier[T :: HNil] {
+ type Out = T :: HNil
+ def apply(l: T :: HNil): Out = l
+ }
+
+ implicit def hlistUnifier[H1, H2, L, T <: HList](implicit u: Lub[H1, H2, L], lt: Unifier[L :: T]): Aux[H1 :: H2 :: T, L :: lt.Out] =
+ new Unifier[H1 :: H2 :: T] {
+ type Out = L :: lt.Out
+ def apply(l: H1 :: H2 :: T): Out = u.left(l.head) :: lt(u.right(l.tail.head) :: l.tail.tail)
+ }
+ }
+
+ /**
+ * Type class supporting unification of all elements that are subtypes of `B` in this `HList` to `B`, with all other
+ * elements left unchanged.
+ *
+ * @author Travis Brown
+ */
+ trait SubtypeUnifier[L <: HList, B] extends DepFn1[L] { type Out <: HList }
+
+ object SubtypeUnifier {
+ def apply[L <: HList, B](implicit unifier: SubtypeUnifier[L, B]): Aux[L, B, unifier.Out] = unifier
+
+ type Aux[L <: HList, B, Out0 <: HList] = SubtypeUnifier[L, B] { type Out = Out0 }
+
+ implicit def hnilSubtypeUnifier[B]: Aux[HNil, B, HNil] =
+ new SubtypeUnifier[HNil, B] {
+ type Out = HNil
+ def apply(l: HNil): Out = l
+ }
+
+ implicit def hlistSubtypeUnifier1[H, T <: HList, B](implicit st: H <:< B, sut: SubtypeUnifier[T, B]): Aux[H :: T, B, B :: sut.Out] =
+ new SubtypeUnifier[H :: T, B] {
+ type Out = B :: sut.Out
+ def apply(l: H :: T): Out = st(l.head) :: sut(l.tail)
+ }
+
+ implicit def hlistSubtypeUnifier2[H, T <: HList, B](implicit nst: H <:!< B, sut: SubtypeUnifier[T, B]): Aux[H :: T, B, H :: sut.Out] =
+ new SubtypeUnifier[H :: T, B] {
+ type Out = H :: sut.Out
+ def apply(l: H :: T): Out = l.head :: sut(l.tail)
+ }
+ }
+
+ /**
+ * Type class supporting conversion of this `HList` to an ordinary `List` with elements typed as the least upper bound
+ * of the types of the elements of this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait ToList[-L <: HList, Lub] {
+ def apply(l: L): List[Lub]
+ }
+
+ trait LowPriorityToList {
+ implicit def hlistToListAny[L <: HList]: ToList[L, Any] =
+ new ToList[L, Any] {
+ type Out = List[Any]
+
+        def apply(l: L): Out = {
+          // buffer is local to each call so that reusing the same instance cannot accumulate results
+          val b = scala.collection.mutable.ListBuffer.empty[Any]
+ @tailrec
+ def loop(l: HList): Unit = l match {
+ case hd :: tl ⇒
+ b += hd; loop(tl)
+ case _ ⇒
+ }
+ loop(l)
+ b.toList
+ }
+ }
+ }
+
+ object ToList extends LowPriorityToList {
+ def apply[L <: HList, Lub](implicit toList: ToList[L, Lub]) = toList
+
+ implicit def hnilToList[T]: ToList[HNil, T] =
+ new ToList[HNil, T] {
+ type Out = List[T]
+ def apply(l: HNil): Out = Nil
+ }
+
+ implicit def hsingleToList[T]: ToList[T :: HNil, T] =
+ new ToList[T :: HNil, T] {
+ type Out = List[T]
+ def apply(l: T :: HNil): Out = List(l.head)
+ }
+
+ implicit def hlistToList[H1, H2, T <: HList, L](implicit u: Lub[H1, H2, L], ttl: ToList[H2 :: T, L]): ToList[H1 :: H2 :: T, L] =
+ new ToList[H1 :: H2 :: T, L] {
+ type Out = List[L]
+ def apply(l: H1 :: H2 :: T): Out = u.left(l.head) :: ttl(l.tail)
+ }
+ }
+
+ /**
+ * Type class supporting conversion of this `HList` to an `Array` with elements typed as the least upper bound
+ * of the types of the elements of this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait ToArray[-L <: HList, Lub] {
+ def apply(len: Int, l: L, i: Int): Array[Lub]
+ }
+
+ trait LowPriorityToArray {
+ implicit def hlistToArrayAnyRef[L <: HList]: ToArray[L, Any] =
+ new ToArray[L, Any] {
+ def apply(len: Int, l: L, i: Int): Array[Any] = {
+        val arr = Array.ofDim[Any](len)
+
+ @tailrec
+ def loop(l: HList, i: Int): Unit = l match {
+ case hd :: tl ⇒
+ arr(i) = hd; loop(tl, i + 1)
+ case _ ⇒
+ }
+ loop(l, 0)
+ arr
+ }
+ }
+ }
+
+ object ToArray extends LowPriorityToArray {
+ def apply[L <: HList, Lub](implicit toArray: ToArray[L, Lub]) = toArray
+
+ import scala.reflect.ClassTag
+
+ implicit def hnilToArray[T: ClassTag]: ToArray[HNil, T] =
+ new ToArray[HNil, T] {
+ def apply(len: Int, l: HNil, i: Int) = Array.ofDim[T](len)
+ }
+
+ implicit def hsingleToArray[T: ClassTag]: ToArray[T :: HNil, T] =
+ new ToArray[T :: HNil, T] {
+ def apply(len: Int, l: T :: HNil, i: Int) = {
+ val arr = Array.ofDim[T](len)
+ arr(i) = l.head
+ arr
+ }
+ }
+
+ implicit def hlistToArray[H1, H2, T <: HList, L](implicit u: Lub[H1, H2, L], tta: ToArray[H2 :: T, L]): ToArray[H1 :: H2 :: T, L] =
+ new ToArray[H1 :: H2 :: T, L] {
+ def apply(len: Int, l: H1 :: H2 :: T, i: Int) = {
+ val arr = tta(len, l.tail, i + 1)
+ arr(i) = u.left(l.head)
+ arr
+ }
+ }
+ }
+
+ /**
+ * Type class supporting conversion of this `HList` to a tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait Tupler[L <: HList] extends DepFn1[L]
+
+ object Tupler extends TuplerInstances {
+ def apply[L <: HList](implicit tupler: Tupler[L]): Aux[L, tupler.Out] = tupler
+
+ implicit val hnilTupler: Aux[HNil, Unit] =
+ new Tupler[HNil] {
+ type Out = Unit
+ def apply(l: HNil): Out = ()
+ }
+ }
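+
+  // For example (a sketch, assuming only the definitions above; the non-empty instances are
+  // provided by the generated `TuplerInstances`):
+  //
+  //   Tupler[Int :: String :: HNil].apply(23 :: "foo" :: HNil)   // (23, "foo")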
+
+ /**
+ * Type class supporting access to the last element of this `HList`. Available only if this `HList` has at least one
+ * element.
+ *
+ * @author Miles Sabin
+ */
+ trait Last[L <: HList] extends DepFn1[L]
+
+ object Last {
+ def apply[L <: HList](implicit last: Last[L]): Aux[L, last.Out] = last
+
+ type Aux[L <: HList, Out0] = Last[L] { type Out = Out0 }
+
+ implicit def hsingleLast[H]: Aux[H :: HNil, H] =
+ new Last[H :: HNil] {
+ type Out = H
+ def apply(l: H :: HNil): Out = l.head
+ }
+
+ implicit def hlistLast[H, T <: HList](implicit lt: Last[T]): Aux[H :: T, lt.Out] =
+ new Last[H :: T] {
+ type Out = lt.Out
+ def apply(l: H :: T): Out = lt(l.tail)
+ }
+ }
+
+ /**
+ * Type class supporting access to all but the last element of this `HList`. Available only if this `HList` has at
+ * least one element.
+ *
+ * @author Miles Sabin
+ */
+ trait Init[L <: HList] extends DepFn1[L] { type Out <: HList }
+
+ object Init {
+ def apply[L <: HList](implicit init: Init[L]): Aux[L, init.Out] = init
+
+ type Aux[L <: HList, Out0 <: HList] = Init[L] { type Out = Out0 }
+
+ implicit def hsingleInit[H]: Aux[H :: HNil, HNil] =
+ new Init[H :: HNil] {
+ type Out = HNil
+ def apply(l: H :: HNil): Out = HNil
+ }
+
+ implicit def hlistInit[H, T <: HList, OutH, OutT <: HList](implicit it: Init[T]): Aux[H :: T, H :: it.Out] =
+ new Init[H :: T] {
+ type Out = H :: it.Out
+ def apply(l: H :: T): Out = l.head :: it(l.tail)
+ }
+ }
+
+ /**
+ * Type class supporting access to the first element of this `HList` of type `U`. Available only if this `HList`
+ * contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait Selector[L <: HList, U] extends DepFn1[L] { type Out = U }
+
+ object Selector {
+ def apply[L <: HList, U](implicit selector: Selector[L, U]): Aux[L, U] = selector
+
+ type Aux[L <: HList, U] = Selector[L, U]
+
+ implicit def hlistSelect1[H, T <: HList]: Aux[H :: T, H] =
+ new Selector[H :: T, H] {
+ def apply(l: H :: T) = l.head
+ }
+
+ implicit def hlistSelect[H, T <: HList, U](implicit st: Selector[T, U]): Aux[H :: T, U] =
+ new Selector[H :: T, U] {
+ def apply(l: H :: T) = st(l.tail)
+ }
+ }
+
+ /**
+   * Type class supporting access to all elements of this `HList` of type `U`.
+ *
+ * @author Alois Cochard
+ */
+ trait Filter[L <: HList, U] extends DepFn1[L] { type Out <: HList }
+
+ object Filter {
+ def apply[L <: HList, U](implicit filter: Filter[L, U]): Aux[L, U, filter.Out] = filter
+
+ type Aux[L <: HList, U, Out0 <: HList] = Filter[L, U] { type Out = Out0 }
+
+ implicit def hlistFilterHNil[L <: HList, U]: Aux[HNil, U, HNil] =
+ new Filter[HNil, U] {
+ type Out = HNil
+ def apply(l: HNil): Out = HNil
+ }
+
+ implicit def hlistFilter1[L <: HList, H](implicit f: Filter[L, H]): Aux[H :: L, H, H :: f.Out] =
+ new Filter[H :: L, H] {
+ type Out = H :: f.Out
+ def apply(l: H :: L): Out = l.head :: f(l.tail)
+ }
+
+ implicit def hlistFilter2[H, L <: HList, U](implicit f: Filter[L, U], e: U =:!= H): Aux[H :: L, U, f.Out] =
+ new Filter[H :: L, U] {
+ type Out = f.Out
+ def apply(l: H :: L): Out = f(l.tail)
+ }
+ }
+
+ /**
+   * Type class supporting access to all elements of this `HList` whose type differs from `U`.
+ *
+ * @author Alois Cochard
+ */
+ trait FilterNot[L <: HList, U] extends DepFn1[L] { type Out <: HList }
+
+ object FilterNot {
+ def apply[L <: HList, U](implicit filter: FilterNot[L, U]): Aux[L, U, filter.Out] = filter
+
+ type Aux[L <: HList, U, Out0 <: HList] = FilterNot[L, U] { type Out = Out0 }
+
+ implicit def hlistFilterNotHNil[L <: HList, U]: Aux[HNil, U, HNil] =
+ new FilterNot[HNil, U] {
+ type Out = HNil
+ def apply(l: HNil): Out = HNil
+ }
+
+ implicit def hlistFilterNot1[L <: HList, H](implicit f: FilterNot[L, H]): Aux[H :: L, H, f.Out] =
+ new FilterNot[H :: L, H] {
+ type Out = f.Out
+ def apply(l: H :: L): Out = f(l.tail)
+ }
+
+ implicit def hlistFilterNot2[H, L <: HList, U, Out <: HList](implicit f: FilterNot[L, U], e: U =:!= H): Aux[H :: L, U, H :: f.Out] =
+ new FilterNot[H :: L, U] {
+ type Out = H :: f.Out
+ def apply(l: H :: L): Out = l.head :: f(l.tail)
+ }
+ }
+
+ /**
+ * Type class supporting removal of an element from this `HList`. Available only if this `HList` contains an
+ * element of type `U`.
+ *
+ * @author Stacy Curl
+ */
+ trait Remove[L <: HList, E] extends DepFn1[L]
+
+ object Remove {
+ def apply[L <: HList, E](implicit remove: Remove[L, E]): Aux[L, E, remove.Out] = remove
+
+ type Aux[L <: HList, E, Out0] = Remove[L, E] { type Out = Out0 }
+
+ implicit def hlistRemove1[H, T <: HList]: Aux[H :: T, H, (H, T)] =
+ new Remove[H :: T, H] {
+ type Out = (H, T)
+ def apply(l: H :: T): Out = (l.head, l.tail)
+ }
+
+ implicit def hlistRemove[H, T <: HList, E, OutT <: HList](implicit r: Aux[T, E, (E, OutT)]): Aux[H :: T, E, (E, H :: OutT)] =
+ new Remove[H :: T, E] {
+ type Out = (E, H :: OutT)
+ def apply(l: H :: T): Out = {
+ val (e, tail) = r(l.tail)
+ (e, l.head :: tail)
+ }
+ }
+ }
+
+ /**
+ * Type class supporting removal of a sublist from this `HList`. Available only if this `HList` contains a
+ * sublist of type `SL`.
+ *
+ * The elements of `SL` do not have to be contiguous in this `HList`.
+ *
+ * @author Stacy Curl
+ */
+ trait RemoveAll[L <: HList, SL <: HList] extends DepFn1[L]
+
+ object RemoveAll {
+ def apply[L <: HList, SL <: HList](implicit remove: RemoveAll[L, SL]): Aux[L, SL, remove.Out] = remove
+
+ type Aux[L <: HList, SL <: HList, Out0] = RemoveAll[L, SL] { type Out = Out0 }
+
+ implicit def hlistRemoveAllNil[L <: HList]: Aux[L, HNil, (HNil, L)] =
+ new RemoveAll[L, HNil] {
+ type Out = (HNil, L)
+ def apply(l: L): Out = (HNil, l)
+ }
+
+ implicit def hlistRemoveAll[L <: HList, E, RemE <: HList, Rem <: HList, SLT <: HList](implicit rt: Remove.Aux[L, E, (E, RemE)], st: Aux[RemE, SLT, (SLT, Rem)]): Aux[L, E :: SLT, (E :: SLT, Rem)] =
+ new RemoveAll[L, E :: SLT] {
+ type Out = (E :: SLT, Rem)
+ def apply(l: L): Out = {
+ val (e, rem) = rt(l)
+ val (sl, left) = st(rem)
+ (e :: sl, left)
+ }
+ }
+ }
+
+ /**
+   * Type class supporting replacement of the first element of type `U` in this `HList` with an element of type `V`.
+ * Available only if this `HList` contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait Replacer[L <: HList, U, V] extends DepFn2[L, V]
+
+ object Replacer {
+ def apply[L <: HList, U, V](implicit replacer: Replacer[L, U, V]): Aux[L, U, V, replacer.Out] = replacer
+
+ type Aux[L <: HList, U, V, Out0] = Replacer[L, U, V] { type Out = Out0 }
+
+ implicit def hlistReplacer1[T <: HList, U, V]: Aux[U :: T, U, V, (U, V :: T)] =
+ new Replacer[U :: T, U, V] {
+ type Out = (U, V :: T)
+ def apply(l: U :: T, v: V): Out = (l.head, v :: l.tail)
+ }
+
+ implicit def hlistReplacer2[H, T <: HList, U, V, OutT <: HList](implicit ut: Aux[T, U, V, (U, OutT)]): Aux[H :: T, U, V, (U, H :: OutT)] =
+ new Replacer[H :: T, U, V] {
+ type Out = (U, H :: OutT)
+ def apply(l: H :: T, v: V): Out = {
+ val (u, outT) = ut(l.tail, v)
+ (u, l.head :: outT)
+ }
+ }
+ }
+
+ /**
+   * Type class supporting replacement of the ''nth'' element of this `HList` with an element of type `V`. Available only if
+ * this `HList` contains at least N elements.
+ *
+ * @author Miles Sabin
+ */
+ trait ReplaceAt[L <: HList, N <: Nat, V] extends DepFn2[L, V]
+
+ object ReplaceAt {
+ def apply[L <: HList, N <: Nat, V](implicit replacer: ReplaceAt[L, N, V]): Aux[L, N, V, replacer.Out] = replacer
+
+ type Aux[L <: HList, N <: Nat, V, Out0] = ReplaceAt[L, N, V] { type Out = Out0 }
+
+ implicit def hlistReplaceAt1[H, T <: HList, V]: Aux[H :: T, _0, V, (H, V :: T)] =
+ new ReplaceAt[H :: T, _0, V] {
+ type Out = (H, V :: T)
+ def apply(l: H :: T, v: V): Out = (l.head, v :: l.tail)
+ }
+
+ implicit def hlistReplaceAt2[H, T <: HList, N <: Nat, U, V, Out0 <: HList](implicit ut: Aux[T, N, V, (U, Out0)]): Aux[H :: T, Succ[N], V, (U, H :: Out0)] =
+ new ReplaceAt[H :: T, Succ[N], V] {
+ type Out = (U, H :: Out0)
+ def apply(l: H :: T, v: V): Out = {
+ val (u, outT) = ut(l.tail, v)
+ (u, l.head :: outT)
+ }
+ }
+ }
+
+ /**
+ * Type class supporting access to the ''nth'' element of this `HList`. Available only if this `HList` has at least
+ * ''n'' elements.
+ *
+ * @author Miles Sabin
+ */
+ trait At[L <: HList, N <: Nat] extends DepFn1[L]
+
+ object At {
+ def apply[L <: HList, N <: Nat](implicit at: At[L, N]): Aux[L, N, at.Out] = at
+
+ type Aux[L <: HList, N <: Nat, Out0] = At[L, N] { type Out = Out0 }
+
+ implicit def hlistAtZero[H, T <: HList]: Aux[H :: T, _0, H] =
+ new At[H :: T, _0] {
+ type Out = H
+ def apply(l: H :: T): Out = l.head
+ }
+
+ implicit def hlistAtN[H, T <: HList, N <: Nat](implicit att: At[T, N]): Aux[H :: T, Succ[N], att.Out] =
+ new At[H :: T, Succ[N]] {
+ type Out = att.Out
+ def apply(l: H :: T): Out = att(l.tail)
+ }
+ }
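+
+  // For example (a sketch, assuming only the definitions above):
+  //
+  //   val second = At[Int :: String :: Boolean :: HNil, Succ[_0]]
+  //   second(23 :: "foo" :: true :: HNil)   // "foo"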
+
+ /**
+ * Type class supporting removal of the first ''n'' elements of this `HList`. Available only if this `HList` has at
+ * least ''n'' elements.
+ *
+ * @author Miles Sabin
+ */
+ trait Drop[L <: HList, N <: Nat] extends DepFn1[L] { type Out <: HList }
+
+ object Drop {
+ def apply[L <: HList, N <: Nat](implicit drop: Drop[L, N]): Aux[L, N, drop.Out] = drop
+
+ type Aux[L <: HList, N <: Nat, Out0 <: HList] = Drop[L, N] { type Out = Out0 }
+
+ implicit def hlistDrop1[L <: HList]: Aux[L, _0, L] =
+ new Drop[L, _0] {
+ type Out = L
+ def apply(l: L): Out = l
+ }
+
+ implicit def hlistDrop2[H, T <: HList, N <: Nat](implicit dt: Drop[T, N]): Aux[H :: T, Succ[N], dt.Out] =
+ new Drop[H :: T, Succ[N]] {
+ type Out = dt.Out
+ def apply(l: H :: T): Out = dt(l.tail)
+ }
+ }
+
+ /**
+ * Type class supporting retrieval of the first ''n'' elements of this `HList`. Available only if this `HList` has at
+ * least ''n'' elements.
+ *
+ * @author Miles Sabin
+ */
+ trait Take[L <: HList, N <: Nat] extends DepFn1[L] { type Out <: HList }
+
+ object Take {
+ def apply[L <: HList, N <: Nat](implicit take: Take[L, N]): Aux[L, N, take.Out] = take
+
+ type Aux[L <: HList, N <: Nat, Out0 <: HList] = Take[L, N] { type Out = Out0 }
+
+ implicit def hlistTake1[L <: HList]: Aux[L, _0, HNil] =
+ new Take[L, _0] {
+ type Out = HNil
+ def apply(l: L): Out = HNil
+ }
+
+ implicit def hlistTake2[H, T <: HList, N <: Nat, Out <: HList](implicit tt: Take[T, N]): Aux[H :: T, Succ[N], H :: tt.Out] =
+ new Take[H :: T, Succ[N]] {
+ type Out = H :: tt.Out
+ def apply(l: H :: T): Out = l.head :: tt(l.tail)
+ }
+ }
+
+ /**
+ * Type class supporting splitting this `HList` at the ''nth'' element returning the prefix and suffix as a pair.
+ * Available only if this `HList` has at least ''n'' elements.
+ *
+ * @author Miles Sabin
+ */
+ trait Split[L <: HList, N <: Nat] extends DepFn1[L]
+
+ object Split {
+ def apply[L <: HList, N <: Nat](implicit split: Split[L, N]): Aux[L, N, split.Out] = split
+
+ type Aux[L <: HList, N <: Nat, Out0] = Split[L, N] { type Out = Out0 }
+
+ implicit def split[L <: HList, N <: Nat, P <: HList, S <: HList](implicit split: Split0[HNil, L, N, P, S]): Aux[L, N, (P, S)] =
+ new Split[L, N] {
+ type Out = (P, S)
+ def apply(l: L): Out = split(HNil, l)
+ }
+
+ trait Split0[AccP <: HList, AccS <: HList, N <: Nat, P <: HList, S <: HList] {
+ def apply(accP: AccP, accS: AccS): (P, S)
+ }
+
+ object Split0 {
+ implicit def hlistSplit1[P <: HList, S <: HList]: Split0[P, S, _0, P, S] =
+ new Split0[P, S, _0, P, S] {
+ def apply(accP: P, accS: S): (P, S) = (accP, accS)
+ }
+
+ implicit def hlistSplit2[AccP <: HList, AccSH, AccST <: HList, N <: Nat, P <: HList, S <: HList](implicit st: Split0[AccP, AccST, N, P, S]): Split0[AccP, AccSH :: AccST, Succ[N], AccSH :: P, S] =
+ new Split0[AccP, AccSH :: AccST, Succ[N], AccSH :: P, S] {
+ def apply(accP: AccP, accS: AccSH :: AccST): (AccSH :: P, S) =
+ st(accP, accS.tail) match { case (prefix, suffix) ⇒ (accS.head :: prefix, suffix) }
+ }
+ }
+ }
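+
+  // For example (a sketch, assuming only the definitions above): splitting at index 1 yields
+  // the prefix and suffix as a pair.
+  //
+  //   val splitAt1 = Split[Int :: String :: Boolean :: HNil, Succ[_0]]
+  //   splitAt1(23 :: "foo" :: true :: HNil)   // (23 :: HNil, "foo" :: true :: HNil)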
+
+ /**
+ * Type class supporting splitting this `HList` at the ''nth'' element returning the reverse prefix and suffix as a
+ * pair. Available only if this `HList` has at least ''n'' elements.
+ *
+ * @author Miles Sabin
+ */
+ trait ReverseSplit[L <: HList, N <: Nat] extends DepFn1[L]
+
+ object ReverseSplit {
+ def apply[L <: HList, N <: Nat](implicit split: ReverseSplit[L, N]): Aux[L, N, split.Out] = split
+
+ type Aux[L <: HList, N <: Nat, Out0] = ReverseSplit[L, N] { type Out = Out0 }
+
+ implicit def reverseSplit[L <: HList, N <: Nat, P <: HList, S <: HList](implicit split: ReverseSplit0[HNil, L, N, P, S]): Aux[L, N, (P, S)] =
+ new ReverseSplit[L, N] {
+ type Out = (P, S)
+ def apply(l: L): Out = split(HNil, l)
+ }
+
+ trait ReverseSplit0[AccP <: HList, AccS <: HList, N <: Nat, P, S] {
+ def apply(accP: AccP, accS: AccS): (P, S)
+ }
+
+ object ReverseSplit0 {
+ implicit def hlistReverseSplit1[P <: HList, S <: HList]: ReverseSplit0[P, S, _0, P, S] =
+ new ReverseSplit0[P, S, _0, P, S] {
+ def apply(accP: P, accS: S): (P, S) = (accP, accS)
+ }
+
+ implicit def hlistReverseSplit2[AccP <: HList, AccSH, AccST <: HList, N <: Nat, P, S](implicit st: ReverseSplit0[AccSH :: AccP, AccST, N, P, S]): ReverseSplit0[AccP, AccSH :: AccST, Succ[N], P, S] =
+ new ReverseSplit0[AccP, AccSH :: AccST, Succ[N], P, S] {
+ def apply(accP: AccP, accS: AccSH :: AccST): (P, S) = st(accS.head :: accP, accS.tail)
+ }
+ }
+ }
+
+ /**
+   * Type class supporting splitting this `HList` at the first occurrence of an element of type `U` returning the prefix
+ * and suffix as a pair. Available only if this `HList` contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait SplitLeft[L <: HList, U] extends DepFn1[L]
+
+ object SplitLeft {
+ def apply[L <: HList, U](implicit split: SplitLeft[L, U]): Aux[L, U, split.Out] = split
+
+ type Aux[L <: HList, U, Out0] = SplitLeft[L, U] { type Out = Out0 }
+
+ implicit def splitLeft[L <: HList, U, P <: HList, S <: HList](implicit splitLeft: SplitLeft0[HNil, L, U, P, S]): Aux[L, U, (P, S)] =
+ new SplitLeft[L, U] {
+ type Out = (P, S)
+ def apply(l: L): Out = splitLeft(HNil, l)
+ }
+
+ trait SplitLeft0[AccP <: HList, AccS <: HList, U, P <: HList, S <: HList] {
+ def apply(accP: AccP, accS: AccS): (P, S)
+ }
+
+ trait LowPrioritySplitLeft0 {
+ implicit def hlistSplitLeft1[AccP <: HList, AccSH, AccST <: HList, U, P <: HList, S <: HList](implicit slt: SplitLeft0[AccP, AccST, U, P, S]): SplitLeft0[AccP, AccSH :: AccST, U, AccSH :: P, S] =
+ new SplitLeft0[AccP, AccSH :: AccST, U, AccSH :: P, S] {
+ def apply(accP: AccP, accS: AccSH :: AccST): (AccSH :: P, S) =
+ slt(accP, accS.tail) match { case (prefix, suffix) ⇒ (accS.head :: prefix, suffix) }
+ }
+ }
+
+ object SplitLeft0 extends LowPrioritySplitLeft0 {
+ implicit def hlistSplitLeft2[P <: HList, SH, ST <: HList]: SplitLeft0[P, SH :: ST, SH, P, SH :: ST] =
+ new SplitLeft0[P, SH :: ST, SH, P, SH :: ST] {
+ def apply(accP: P, accS: SH :: ST): (P, SH :: ST) = (accP, accS)
+ }
+ }
+ }
+
+ /**
+   * Type class supporting splitting this `HList` at the first occurrence of an element of type `U` returning the reverse
+ * prefix and suffix as a pair. Available only if this `HList` contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait ReverseSplitLeft[L <: HList, U] extends DepFn1[L]
+
+ object ReverseSplitLeft {
+ def apply[L <: HList, U](implicit split: ReverseSplitLeft[L, U]): Aux[L, U, split.Out] = split
+
+ type Aux[L <: HList, U, Out0] = ReverseSplitLeft[L, U] { type Out = Out0 }
+
+ implicit def reverseSplitLeft[L <: HList, U, P <: HList, S <: HList](implicit splitLeft: ReverseSplitLeft0[HNil, L, U, P, S]): Aux[L, U, (P, S)] =
+ new ReverseSplitLeft[L, U] {
+ type Out = (P, S)
+ def apply(l: L): Out = splitLeft(HNil, l)
+ }
+
+ trait ReverseSplitLeft0[AccP <: HList, AccS <: HList, U, P, S] {
+ def apply(accP: AccP, accS: AccS): (P, S)
+ }
+
+ trait LowPriorityReverseSplitLeft0 {
+ implicit def hlistReverseSplitLeft1[AccP <: HList, AccSH, AccST <: HList, U, P, S](implicit slt: ReverseSplitLeft0[AccSH :: AccP, AccST, U, P, S]): ReverseSplitLeft0[AccP, AccSH :: AccST, U, P, S] =
+ new ReverseSplitLeft0[AccP, AccSH :: AccST, U, P, S] {
+ def apply(accP: AccP, accS: AccSH :: AccST): (P, S) = slt(accS.head :: accP, accS.tail)
+ }
+ }
+
+ object ReverseSplitLeft0 extends LowPriorityReverseSplitLeft0 {
+ implicit def hlistReverseSplitLeft2[P <: HList, SH, ST <: HList]: ReverseSplitLeft0[P, SH :: ST, SH, P, SH :: ST] =
+ new ReverseSplitLeft0[P, SH :: ST, SH, P, SH :: ST] {
+ def apply(accP: P, accS: SH :: ST): (P, SH :: ST) = (accP, accS)
+ }
+ }
+ }
+
+ /**
+   * Type class supporting splitting this `HList` at the last occurrence of an element of type `U` returning the prefix
+ * and suffix as a pair. Available only if this `HList` contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait SplitRight[L <: HList, U] extends DepFn1[L]
+
+ object SplitRight {
+ def apply[L <: HList, U](implicit split: SplitRight[L, U]): Aux[L, U, split.Out] = split
+
+ type Aux[L <: HList, U, Out0] = SplitRight[L, U] { type Out = Out0 }
+
+ implicit def splitRight[L <: HList, U, P <: HList, S <: HList](implicit splitRight: SplitRight0[L, HNil, HNil, U, P, S]): Aux[L, U, (P, S)] =
+ new SplitRight[L, U] {
+ type Out = (P, S)
+ def apply(l: L): Out = splitRight(l, HNil, HNil)
+ }
+
+ trait SplitRight0[Rev <: HList, AccP <: HList, AccS <: HList, U, P <: HList, S <: HList] {
+ def apply(rev: Rev, accP: AccP, accS: AccS): (P, S)
+ }
+
+ trait LowPrioritySplitRight0 {
+ implicit def hlistSplitRight1[RevH, RevT <: HList, AccP <: HList, U, P <: HList, S <: HList](implicit srt: SplitRight0[RevT, RevH :: AccP, HNil, U, P, S]): SplitRight0[RevH :: RevT, AccP, HNil, U, P, S] =
+ new SplitRight0[RevH :: RevT, AccP, HNil, U, P, S] {
+ def apply(rev: RevH :: RevT, accP: AccP, accS: HNil): (P, S) = srt(rev.tail, rev.head :: accP, accS)
+ }
+
+ implicit def hlistSplitRight2[AccPH, AccPT <: HList, AccS <: HList, U, P <: HList, S <: HList](implicit srt: SplitRight0[HNil, AccPT, AccPH :: AccS, U, P, S]): SplitRight0[HNil, AccPH :: AccPT, AccS, U, P, S] =
+ new SplitRight0[HNil, AccPH :: AccPT, AccS, U, P, S] {
+ def apply(rev: HNil, accP: AccPH :: AccPT, accS: AccS): (P, S) = srt(rev, accP.tail, accP.head :: accS)
+ }
+ }
+
+ object SplitRight0 extends LowPrioritySplitRight0 {
+ implicit def hlistSplitRight3[PH, PT <: HList, S <: HList](implicit reverse: Reverse[PH :: PT]): SplitRight0[HNil, PH :: PT, S, PH, reverse.Out, S] =
+ new SplitRight0[HNil, PH :: PT, S, PH, reverse.Out, S] {
+ def apply(rev: HNil, accP: PH :: PT, accS: S): (reverse.Out, S) = (accP.reverse, accS)
+ }
+ }
+ }
+
+ /**
+   * Type class supporting splitting this `HList` at the last occurrence of an element of type `U` returning the reverse
+ * prefix and suffix as a pair. Available only if this `HList` contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait ReverseSplitRight[L <: HList, U] extends DepFn1[L]
+
+ object ReverseSplitRight {
+ def apply[L <: HList, U](implicit split: ReverseSplitRight[L, U]): Aux[L, U, split.Out] = split
+
+ type Aux[L <: HList, U, Out0] = ReverseSplitRight[L, U] { type Out = Out0 }
+
+ implicit def reverseSplitRight[L <: HList, U, P <: HList, S <: HList](implicit splitRight: ReverseSplitRight0[L, HNil, HNil, U, P, S]): Aux[L, U, (P, S)] =
+ new ReverseSplitRight[L, U] {
+ type Out = (P, S)
+ def apply(l: L): Out = splitRight(l, HNil, HNil)
+ }
+
+ trait ReverseSplitRight0[Rev <: HList, AccP <: HList, AccS <: HList, U, P, S] {
+ def apply(rev: Rev, accP: AccP, accS: AccS): (P, S)
+ }
+
+ trait LowPriorityReverseSplitRight0 {
+ implicit def hlistReverseSplitRight1[RevH, RevT <: HList, AccP <: HList, U, P <: HList, S <: HList](implicit srt: ReverseSplitRight0[RevT, RevH :: AccP, HNil, U, P, S]): ReverseSplitRight0[RevH :: RevT, AccP, HNil, U, P, S] =
+ new ReverseSplitRight0[RevH :: RevT, AccP, HNil, U, P, S] {
+ def apply(rev: RevH :: RevT, accP: AccP, accS: HNil): (P, S) = srt(rev.tail, rev.head :: accP, accS)
+ }
+
+ implicit def hlistReverseSplitRight2[AccPH, AccPT <: HList, AccS <: HList, U, P <: HList, S <: HList](implicit srt: ReverseSplitRight0[HNil, AccPT, AccPH :: AccS, U, P, S]): ReverseSplitRight0[HNil, AccPH :: AccPT, AccS, U, P, S] =
+ new ReverseSplitRight0[HNil, AccPH :: AccPT, AccS, U, P, S] {
+ def apply(rev: HNil, accP: AccPH :: AccPT, accS: AccS): (P, S) = srt(rev, accP.tail, accP.head :: accS)
+ }
+ }
+
+ object ReverseSplitRight0 extends LowPriorityReverseSplitRight0 {
+ implicit def hlistReverseSplitRight3[PH, PT <: HList, S <: HList]: ReverseSplitRight0[HNil, PH :: PT, S, PH, PH :: PT, S] =
+ new ReverseSplitRight0[HNil, PH :: PT, S, PH, PH :: PT, S] {
+ def apply(rev: HNil, accP: PH :: PT, accS: S): (PH :: PT, S) = (accP, accS)
+ }
+ }
+ }
+
+ /**
+ * Type class supporting reversing this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait Reverse[L <: HList] extends DepFn1[L] { type Out <: HList }
+
+ object Reverse {
+ def apply[L <: HList](implicit reverse: Reverse[L]): Aux[L, reverse.Out] = reverse
+
+ type Aux[L <: HList, Out0 <: HList] = Reverse[L] { type Out = Out0 }
+
+ implicit def reverse[L <: HList, Out0 <: HList](implicit reverse: Reverse0[HNil, L, Out0]): Aux[L, Out0] =
+ new Reverse[L] {
+ type Out = Out0
+ def apply(l: L): Out = reverse(HNil, l)
+ }
+
+ trait Reverse0[Acc <: HList, L <: HList, Out <: HList] {
+ def apply(acc: Acc, l: L): Out
+ }
+
+ object Reverse0 {
+ implicit def hnilReverse[Out <: HList]: Reverse0[Out, HNil, Out] =
+ new Reverse0[Out, HNil, Out] {
+ def apply(acc: Out, l: HNil): Out = acc
+ }
+
+ implicit def hlistReverse[Acc <: HList, InH, InT <: HList, Out <: HList](implicit rt: Reverse0[InH :: Acc, InT, Out]): Reverse0[Acc, InH :: InT, Out] =
+ new Reverse0[Acc, InH :: InT, Out] {
+ def apply(acc: Acc, l: InH :: InT): Out = rt(l.head :: acc, l.tail)
+ }
+ }
+ }
+
+ /**
+ * Type class supporting prepending to this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait Prepend[P <: HList, S <: HList] extends DepFn2[P, S] { type Out <: HList }
+
+ trait LowPriorityPrepend {
+ type Aux[P <: HList, S <: HList, Out0 <: HList] = Prepend[P, S] { type Out = Out0 }
+
+ implicit def hnilPrepend0[P <: HList, S <: HNil]: Aux[P, S, P] =
+ new Prepend[P, S] {
+ type Out = P
+ def apply(prefix: P, suffix: S): P = prefix
+ }
+ }
+
+ object Prepend extends LowPriorityPrepend {
+ def apply[P <: HList, S <: HList](implicit prepend: Prepend[P, S]): Aux[P, S, prepend.Out] = prepend
+
+ implicit def hnilPrepend1[P <: HNil, S <: HList]: Aux[P, S, S] =
+ new Prepend[P, S] {
+ type Out = S
+ def apply(prefix: P, suffix: S): S = suffix
+ }
+
+ implicit def hlistPrepend[PH, PT <: HList, S <: HList](implicit pt: Prepend[PT, S]): Aux[PH :: PT, S, PH :: pt.Out] =
+ new Prepend[PH :: PT, S] {
+ type Out = PH :: pt.Out
+ def apply(prefix: PH :: PT, suffix: S): Out = prefix.head :: pt(prefix.tail, suffix)
+ }
+ }
+
+ /**
+ * Type class supporting reverse prepending to this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait ReversePrepend[P <: HList, S <: HList] extends DepFn2[P, S] { type Out <: HList }
+
+ trait LowPriorityReversePrepend {
+ type Aux[P <: HList, S <: HList, Out0 <: HList] = ReversePrepend[P, S] { type Out = Out0 }
+
+ implicit def hnilReversePrepend0[P <: HList, S <: HNil](implicit rv: Reverse[P]): Aux[P, S, rv.Out] =
+ new ReversePrepend[P, S] {
+ type Out = rv.Out
+ def apply(prefix: P, suffix: S) = prefix.reverse
+ }
+ }
+
+ object ReversePrepend extends LowPriorityReversePrepend {
+ def apply[P <: HList, S <: HList](implicit prepend: ReversePrepend[P, S]): Aux[P, S, prepend.Out] = prepend
+
+ implicit def hnilReversePrepend1[P <: HNil, S <: HList]: Aux[P, S, S] =
+ new ReversePrepend[P, S] {
+ type Out = S
+ def apply(prefix: P, suffix: S) = suffix
+ }
+
+ implicit def hlistReversePrepend[PH, PT <: HList, S <: HList](implicit rpt: ReversePrepend[PT, PH :: S]): Aux[PH :: PT, S, rpt.Out] =
+ new ReversePrepend[PH :: PT, S] {
+ type Out = rpt.Out
+ def apply(prefix: PH :: PT, suffix: S): Out = rpt(prefix.tail, prefix.head :: suffix)
+ }
+ }
+
+ /**
+ * Type class supporting zipping this `HList` with an `HList` of `HList`s returning an `HList` of `HList`s with each
+ * element of this `HList` prepended to the corresponding `HList` element of the argument `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait ZipOne[H <: HList, T <: HList] extends DepFn2[H, T] { type Out <: HList }
+
+ object ZipOne {
+ def apply[H <: HList, T <: HList](implicit zip: ZipOne[H, T]): Aux[H, T, zip.Out] = zip
+
+ type Aux[H <: HList, T <: HList, Out0 <: HList] = ZipOne[H, T] { type Out = Out0 }
+
+ implicit def zipOne1[H <: HList]: Aux[H, HNil, HNil] =
+ new ZipOne[H, HNil] {
+ type Out = HNil
+ def apply(h: H, t: HNil): Out = HNil
+ }
+
+ implicit def zipOne2[T <: HList]: Aux[HNil, T, HNil] =
+ new ZipOne[HNil, T] {
+ type Out = HNil
+ def apply(h: HNil, t: T): Out = HNil
+ }
+
+ implicit def zipOne3[H, T <: HList]: Aux[H :: HNil, T :: HNil, (H :: T) :: HNil] =
+ new ZipOne[H :: HNil, T :: HNil] {
+ type Out = (H :: T) :: HNil
+ def apply(h: H :: HNil, t: T :: HNil): Out = (h.head :: t.head) :: HNil
+ }
+
+ implicit def zipOne4[HH, HT <: HList, TH <: HList, TT <: HList](implicit zot: ZipOne[HT, TT]): Aux[HH :: HT, TH :: TT, (HH :: TH) :: zot.Out] =
+ new ZipOne[HH :: HT, TH :: TT] {
+ type Out = (HH :: TH) :: zot.Out
+ def apply(h: HH :: HT, t: TH :: TT): Out = (h.head :: t.head) :: zot(h.tail, t.tail)
+ }
+ }
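+
+ // Illustrative usage sketch (editorial addition, not part of the original shapeless sources;
+ // assumes the usual `::`/`HNil` constructors are in scope):
+ //
+ //   val heads = 1 :: "a" :: HNil
+ //   val tails = (2 :: 3 :: HNil) :: ("b" :: "c" :: HNil) :: HNil
+ //   ZipOne[Int :: String :: HNil, (Int :: Int :: HNil) :: (String :: String :: HNil) :: HNil]
+ //     .apply(heads, tails)
+ //   // yields (1 :: 2 :: 3 :: HNil) :: ("a" :: "b" :: "c" :: HNil) :: HNil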
+
+ /**
+ * Type class supporting transposing this `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait Transposer[L <: HList] extends DepFn1[L] { type Out <: HList }
+
+ object Transposer {
+ def apply[L <: HList](implicit transposer: Transposer[L]): Aux[L, transposer.Out] = transposer
+
+ type Aux[L <: HList, Out0 <: HList] = Transposer[L] { type Out = Out0 }
+
+ implicit def hnilTransposer: Aux[HNil, HNil] =
+ new Transposer[HNil] {
+ type Out = HNil
+ def apply(l: HNil): Out = l
+ }
+
+ implicit def hlistTransposer1[H <: HList, MC <: HList, Out0 <: HList](implicit mc: ConstMapper.Aux[HNil, H, MC], zo: ZipOne.Aux[H, MC, Out0]): Aux[H :: HNil, Out0] =
+ new Transposer[H :: HNil] {
+ type Out = Out0
+ def apply(l: H :: HNil): Out = zo(l.head, mc(HNil, l.head))
+ }
+
+ implicit def hlistTransposer2[H <: HList, TH <: HList, TT <: HList, OutT <: HList, Out0 <: HList](implicit tt: Aux[TH :: TT, OutT], zo: ZipOne.Aux[H, OutT, Out0]): Aux[H :: TH :: TT, Out0] =
+ new Transposer[H :: TH :: TT] {
+ type Out = Out0
+ def apply(l: H :: TH :: TT): Out = zo(l.head, tt(l.tail))
+ }
+ }
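+
+ // Illustrative usage sketch (editorial addition, not part of the original sources):
+ //
+ //   val rows = (1 :: "a" :: HNil) :: (2 :: "b" :: HNil) :: HNil
+ //   Transposer[(Int :: String :: HNil) :: (Int :: String :: HNil) :: HNil].apply(rows)
+ //   // yields (1 :: 2 :: HNil) :: ("a" :: "b" :: HNil) :: HNil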
+
+ /**
+ * Type class supporting zipping this `HList` of `HList`s returning an `HList` of tuples.
+ *
+ * @author Miles Sabin
+ */
+ trait Zip[L <: HList] extends DepFn1[L] { type Out <: HList }
+
+ object Zip {
+ def apply[L <: HList](implicit zip: Zip[L]): Aux[L, zip.Out] = zip
+
+ type Aux[L <: HList, Out0 <: HList] = Zip[L] { type Out = Out0 }
+
+ implicit def zipper[L <: HList, OutT <: HList](implicit transposer: Transposer.Aux[L, OutT],
+ mapper: Mapper[tupled.type, OutT]): Aux[L, mapper.Out] =
+ new Zip[L] {
+ type Out = mapper.Out
+ def apply(l: L): Out = l.transpose map tupled
+ }
+ }
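+
+ // Illustrative usage sketch (editorial addition, not part of the original sources):
+ //
+ //   Zip[(Int :: Int :: HNil) :: (String :: String :: HNil) :: HNil]
+ //     .apply((1 :: 2 :: HNil) :: ("a" :: "b" :: HNil) :: HNil)
+ //   // yields (1, "a") :: (2, "b") :: HNil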
+
+ /**
+ * Type class supporting unzipping this `HList` of tuples returning a tuple of `HList`s.
+ *
+ * @author Miles Sabin
+ */
+ trait Unzip[L <: HList] extends DepFn1[L]
+
+ object Unzip {
+ def apply[L <: HList](implicit unzip: Unzip[L]): Aux[L, unzip.Out] = unzip
+
+ type Aux[L <: HList, Out0] = Unzip[L] { type Out = Out0 }
+
+ implicit def unzipper[L <: HList, OutM <: HList, OutT <: HList](implicit mapper: Mapper.Aux[productElements.type, L, OutM],
+ transposer: Transposer.Aux[OutM, OutT],
+ tupler: Tupler[OutT]): Aux[L, tupler.Out] =
+ new Unzip[L] {
+ type Out = tupler.Out
+ def apply(l: L): Out = (l map productElements).transpose.tupled
+ }
+ }
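+
+ // Illustrative usage sketch (editorial addition, not part of the original sources):
+ //
+ //   Unzip[(Int, String) :: (Int, String) :: HNil].apply((1, "a") :: (2, "b") :: HNil)
+ //   // yields (1 :: 2 :: HNil, "a" :: "b" :: HNil)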
+
+ /**
+ * Type class supporting zipping this `HList` of monomorphic function values with its argument `HList` of
+ * correspondingly typed function arguments returning the result of each application as an `HList`. Available only if
+ * there is evidence that the corresponding function and argument elements have compatible types.
+ *
+ * @author Miles Sabin
+ */
+ trait ZipApply[FL <: HList, AL <: HList] extends DepFn2[FL, AL] { type Out <: HList }
+
+ object ZipApply {
+ def apply[FL <: HList, AL <: HList](implicit zip: ZipApply[FL, AL]): Aux[FL, AL, zip.Out] = zip
+
+ type Aux[FL <: HList, AL <: HList, Out0 <: HList] = ZipApply[FL, AL] { type Out = Out0 }
+
+ implicit def hnilZipApply: Aux[HNil, HNil, HNil] =
+ new ZipApply[HNil, HNil] {
+ type Out = HNil
+ def apply(fl: HNil, al: HNil): Out = HNil
+ }
+
+ implicit def hconsZipApply[T, R, FLT <: HList, ALT <: HList](implicit ztt: ZipApply[FLT, ALT]): Aux[(T ⇒ R) :: FLT, T :: ALT, R :: ztt.Out] =
+ new ZipApply[(T ⇒ R) :: FLT, T :: ALT] {
+ type Out = R :: ztt.Out
+ def apply(fl: (T ⇒ R) :: FLT, al: T :: ALT): Out = fl.head(al.head) :: ztt(fl.tail, al.tail)
+ }
+ }
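+
+ // Illustrative usage sketch (editorial addition, not part of the original sources):
+ //
+ //   val fns  = ((i: Int) ⇒ i + 1) :: ((s: String) ⇒ s.length) :: HNil
+ //   val args = 41 :: "abc" :: HNil
+ //   ZipApply[(Int ⇒ Int) :: (String ⇒ Int) :: HNil, Int :: String :: HNil].apply(fns, args)
+ //   // yields 42 :: 3 :: HNil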
+
+ /**
+ * Type class supporting zipping an `HList` with a constant, resulting in an `HList` of tuples of the form
+ * ({element from input `HList`}, {supplied constant})
+ *
+ * @author Cody Allen
+ */
+ trait ZipConst[C, L <: HList] extends DepFn2[C, L] { type Out <: HList }
+
+ object ZipConst {
+ def apply[C, L <: HList](implicit zip: ZipConst[C, L]): Aux[C, L, zip.Out] = zip
+
+ type Aux[C, L <: HList, Out0 <: HList] = ZipConst[C, L] { type Out = Out0 }
+
+ implicit def constZipper[C, L <: HList, M <: HList](implicit mapper: ConstMapper.Aux[C, L, M],
+ zipper: Zip[L :: M :: HNil]): Aux[C, L, zipper.Out] =
+ new ZipConst[C, L] {
+ type Out = zipper.Out
+ def apply(c: C, l: L) = zipper(l :: mapper(c, l) :: HNil)
+ }
+ }
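+
+ // Illustrative usage sketch (editorial addition, not part of the original sources):
+ //
+ //   ZipConst[String, Int :: Boolean :: HNil].apply("label", 1 :: true :: HNil)
+ //   // yields (1, "label") :: (true, "label") :: HNil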
+
+ /**
+ * Type class supporting zipping an `HList` with another `HList` using a `Poly2`, resulting in an `HList`.
+ *
+ * @author Stacy Curl
+ */
+ trait ZipWith[L <: HList, R <: HList, P <: Poly2] extends DepFn2[L, R] { type Out <: HList }
+
+ object ZipWith {
+ def apply[L <: HList, R <: HList, P <: Poly2](implicit zipWith: ZipWith[L, R, P]): Aux[L, R, P, zipWith.Out] = zipWith
+
+ type Aux[L <: HList, R <: HList, P <: Poly2, Out0 <: HList] = ZipWith[L, R, P] { type Out = Out0 }
+
+ implicit def hnilZipWithHNil[P <: Poly2]: Aux[HNil, HNil, P, HNil] = constZipWith[HNil, HNil, P]
+ implicit def hnilZipWithHList[R <: HList, P <: Poly2]: Aux[HNil, R, P, HNil] = constZipWith[HNil, R, P]
+ implicit def hlistZipWithHNil[L <: HList, P <: Poly2]: Aux[L, HNil, P, HNil] = constZipWith[L, HNil, P]
+
+ implicit def hlistZipWithHList[LH, RH, LT <: HList, RT <: HList, P <: Poly2](implicit zipWith: ZipWith[LT, RT, P], clr: Case2[P, LH, RH]): Aux[LH :: LT, RH :: RT, P, clr.Result :: zipWith.Out] =
+ new ZipWith[LH :: LT, RH :: RT, P] {
+ type Out = clr.Result :: zipWith.Out
+ def apply(l: LH :: LT, r: RH :: RT): Out =
+ clr(l.head, r.head) :: zipWith(l.tail, r.tail)
+ }
+
+ private def constZipWith[L <: HList, R <: HList, P <: Poly2]: Aux[L, R, P, HNil] =
+ new ZipWith[L, R, P] {
+ type Out = HNil
+ def apply(l: L, r: R): HNil = HNil
+ }
+ }
+
+ /**
+ * Type class supporting zipping an `HList` of values with an `HList` of keys to create a record.
+ *
+ * @author Cody Allen
+ */
+ trait ZipWithKeys[K <: HList, V <: HList] extends DepFn2[K, V] { type Out <: HList }
+
+ object ZipWithKeys {
+ import akka.shapeless.record._
+
+ def apply[K <: HList, V <: HList](implicit zipWithKeys: ZipWithKeys[K, V]): Aux[K, V, zipWithKeys.Out] = zipWithKeys
+
+ type Aux[K <: HList, V <: HList, Out0 <: HList] = ZipWithKeys[K, V] { type Out = Out0 }
+
+ implicit val hnilZipWithKeys: Aux[HNil, HNil, HNil] = new ZipWithKeys[HNil, HNil] {
+ type Out = HNil
+ def apply(k: HNil, v: HNil) = HNil
+ }
+
+ implicit def hconsZipWithKeys[KH, VH, KT <: HList, VT <: HList](implicit zipWithKeys: ZipWithKeys[KT, VT], wkh: Witness.Aux[KH]): Aux[KH :: KT, VH :: VT, FieldType[KH, VH] :: zipWithKeys.Out] =
+ new ZipWithKeys[KH :: KT, VH :: VT] {
+ type Out = FieldType[KH, VH] :: zipWithKeys.Out
+ def apply(k: KH :: KT, v: VH :: VT): Out =
+ field[wkh.T](v.head) :: zipWithKeys(k.tail, v.tail)
+ }
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/ops/nat.scala b/akka-parsing/src/main/scala/akka/shapeless/ops/nat.scala
new file mode 100644
index 0000000000..da712b7143
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/ops/nat.scala
@@ -0,0 +1,210 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package ops
+
+object nat {
+ /**
+ * Type class witnessing that `B` is the predecessor of `A`.
+ *
+ * @author Miles Sabin
+ */
+ trait Pred[A <: Nat] { type Out <: Nat }
+
+ object Pred {
+ def apply[A <: Nat](implicit pred: Pred[A]): Aux[A, pred.Out] = pred
+
+ type Aux[A <: Nat, B <: Nat] = Pred[A] { type Out = B }
+
+ implicit def pred[B <: Nat]: Aux[Succ[B], B] = new Pred[Succ[B]] { type Out = B }
+ }
+
+ /**
+ * Type class witnessing that `C` is the sum of `A` and `B`.
+ *
+ * @author Miles Sabin
+ */
+ trait Sum[A <: Nat, B <: Nat] { type Out <: Nat }
+
+ object Sum {
+ def apply[A <: Nat, B <: Nat](implicit sum: Sum[A, B]): Aux[A, B, sum.Out] = sum
+
+ type Aux[A <: Nat, B <: Nat, C <: Nat] = Sum[A, B] { type Out = C }
+
+ implicit def sum1[B <: Nat]: Aux[_0, B, B] = new Sum[_0, B] { type Out = B }
+ implicit def sum2[A <: Nat, B <: Nat](implicit sum: Sum[A, Succ[B]]): Aux[Succ[A], B, sum.Out] = new Sum[Succ[A], B] { type Out = sum.Out }
+ }
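+
+ // Illustrative usage sketch (editorial addition, not part of the original sources;
+ // the nat literals `_2`, `_3`, `_5` come from akka.shapeless.nat):
+ //
+ //   import akka.shapeless.nat._
+ //   val sum = Sum[_2, _3]
+ //   implicitly[sum.Out =:= _5]   // compiles: 2 + 3 = 5 at the type level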
+
+ /**
+ * Type class witnessing that `C` is the difference of `A` and `B`.
+ *
+ * @author Miles Sabin
+ */
+ trait Diff[A <: Nat, B <: Nat] { type Out <: Nat }
+
+ object Diff {
+ def apply[A <: Nat, B <: Nat](implicit diff: Diff[A, B]): Aux[A, B, diff.Out] = diff
+
+ type Aux[A <: Nat, B <: Nat, C <: Nat] = Diff[A, B] { type Out = C }
+
+ implicit def diff1[A <: Nat]: Aux[A, _0, A] = new Diff[A, _0] { type Out = A }
+ implicit def diff2[A <: Nat, B <: Nat](implicit diff: Diff[A, B]): Aux[Succ[A], Succ[B], diff.Out] = new Diff[Succ[A], Succ[B]] { type Out = diff.Out }
+ }
+
+ /**
+ * Type class witnessing that `C` is the product of `A` and `B`.
+ *
+ * @author Miles Sabin
+ */
+ trait Prod[A <: Nat, B <: Nat] { type Out <: Nat }
+
+ object Prod {
+ def apply[A <: Nat, B <: Nat](implicit prod: Prod[A, B]): Aux[A, B, prod.Out] = prod
+
+ type Aux[A <: Nat, B <: Nat, C <: Nat] = Prod[A, B] { type Out = C }
+
+ implicit def prod1[B <: Nat]: Aux[_0, B, _0] = new Prod[_0, B] { type Out = _0 }
+ implicit def prod2[A <: Nat, B <: Nat, C <: Nat](implicit prod: Prod.Aux[A, B, C], sum: Sum[B, C]): Aux[Succ[A], B, sum.Out] = new Prod[Succ[A], B] { type Out = sum.Out }
+ }
+
+ /**
+ * Type class witnessing that `Out` is the quotient of `A` and `B`.
+ *
+ * @author Tom Switzer
+ */
+ trait Div[A <: Nat, B <: Nat] { type Out <: Nat }
+
+ object Div {
+ def apply[A <: Nat, B <: Nat](implicit div: Div[A, B]): Aux[A, B, div.Out] = div
+
+ import LT._
+
+ type Aux[A <: Nat, B <: Nat, C <: Nat] = Div[A, B] { type Out = C }
+
+ implicit def div1[A <: Nat]: Aux[_0, A, _0] = new Div[_0, A] { type Out = _0 }
+
+ implicit def div2[A <: Nat, B <: Nat](implicit lt: A < B): Aux[A, B, _0] =
+ new Div[A, B] { type Out = _0 }
+
+ implicit def div3[A <: Nat, B <: Nat, C <: Nat, D <: Nat](implicit diff: Diff.Aux[Succ[A], B, C], div: Div.Aux[C, B, D]): Aux[Succ[A], B, Succ[D]] =
+ new Div[Succ[A], B] { type Out = Succ[D] }
+ }
+
+ /**
+ * Type class witnessing that `Out` is `A` mod `B`.
+ *
+ * @author Tom Switzer
+ */
+ trait Mod[A <: Nat, B <: Nat] { type Out <: Nat }
+
+ object Mod {
+ def apply[A <: Nat, B <: Nat](implicit mod: Mod[A, B]): Aux[A, B, mod.Out] = mod
+
+ type Aux[A <: Nat, B <: Nat, C <: Nat] = Mod[A, B] { type Out = C }
+
+ implicit def modAux[A <: Nat, B <: Nat, C <: Nat, D <: Nat, E <: Nat](implicit div: Div.Aux[A, B, C], prod: Prod.Aux[C, B, D], diff: Diff.Aux[A, D, E]): Aux[A, B, E] =
+ new Mod[A, B] { type Out = E }
+ }
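+
+ // Illustrative usage sketch (editorial addition, not part of the original sources):
+ //
+ //   import akka.shapeless.nat._
+ //   val div = Div[_7, _2]        // div.Out is _3
+ //   val mod = Mod[_7, _2]        // mod.Out is _1
+ //   implicitly[mod.Out =:= _1]   // compiles: 7 mod 2 = 1 at the type level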
+
+ /**
+ * Type class witnessing that `A` is less than `B`.
+ *
+ * @author Miles Sabin
+ */
+ trait LT[A <: Nat, B <: Nat]
+
+ object LT {
+ def apply[A <: Nat, B <: Nat](implicit lt: A < B) = lt
+
+ type <[A <: Nat, B <: Nat] = LT[A, B]
+
+ implicit def lt1[B <: Nat] = new <[_0, Succ[B]] {}
+ implicit def lt2[A <: Nat, B <: Nat](implicit lt: A < B) = new <[Succ[A], Succ[B]] {}
+ }
+
+ /**
+ * Type class witnessing that `A` is less than or equal to `B`.
+ *
+ * @author Miles Sabin
+ */
+ trait LTEq[A <: Nat, B <: Nat]
+
+ object LTEq {
+ def apply[A <: Nat, B <: Nat](implicit lteq: A <= B) = lteq
+
+ type <=[A <: Nat, B <: Nat] = LTEq[A, B]
+
+ implicit def ltEq1 = new <=[_0, _0] {}
+ implicit def ltEq2[B <: Nat] = new <=[_0, Succ[B]] {}
+ implicit def ltEq3[A <: Nat, B <: Nat](implicit lteq: A <= B) = new <=[Succ[A], Succ[B]] {}
+ }
+
+ /**
+ * Type class witnessing that `Out` is `A` min `B`.
+ *
+ * @author George Leontiev
+ */
+ trait Min[A <: Nat, B <: Nat] { type Out <: Nat }
+
+ object Min {
+ def apply[A <: Nat, B <: Nat](implicit min: Min[A, B]): Aux[A, B, min.Out] = min
+
+ type Aux[A <: Nat, B <: Nat, C <: Nat] = Min[A, B] { type Out = C }
+
+ implicit def minAux0[A <: Nat, B <: Nat, C <: Nat](implicit lteq: LTEq[A, B]): Aux[A, B, A] = new Min[A, B] { type Out = A }
+ implicit def minAux1[A <: Nat, B <: Nat, C <: Nat](implicit lteq: LT[B, A]): Aux[A, B, B] = new Min[A, B] { type Out = B }
+ }
+
+ /**
+ * Type class witnessing that `Out` is `X` raised to the power `N`.
+ *
+ * @author George Leontiev
+ */
+ trait Pow[N <: Nat, X <: Nat] { type Out <: Nat }
+
+ object Pow {
+ def apply[A <: Nat, B <: Nat](implicit pow: Pow[A, B]): Aux[A, B, pow.Out] = pow
+
+ import akka.shapeless.nat._1
+
+ type Aux[N <: Nat, X <: Nat, Z <: Nat] = Pow[N, X] { type Out = Z }
+
+ implicit def pow1[A <: Nat]: Aux[Succ[A], _0, _0] = new Pow[Succ[A], _0] { type Out = _0 }
+ implicit def pow2[A <: Nat]: Aux[_0, Succ[A], _1] = new Pow[_0, Succ[A]] { type Out = _1 }
+ implicit def pow3[N <: Nat, X <: Nat, Z <: Nat, Y <: Nat](implicit ev: Pow.Aux[N, X, Z], ev2: Prod.Aux[Z, X, Y]): Aux[Succ[N], X, Y] = new Pow[Succ[N], X] { type Out = Y }
+ }
+
+ /**
+ * Type class supporting conversion of type-level `Nat`s to value-level `Int`s.
+ *
+ * @author Miles Sabin
+ */
+ trait ToInt[N <: Nat] {
+ def apply(): Int
+ }
+
+ object ToInt {
+ def apply[N <: Nat](implicit toInt: ToInt[N]): ToInt[N] = toInt
+
+ implicit val toInt0 = new ToInt[_0] {
+ def apply() = 0
+ }
+ implicit def toIntSucc[N <: Nat](implicit toIntN: ToInt[N]) = new ToInt[Succ[N]] {
+ def apply() = toIntN() + 1
+ }
+ }
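+
+ // Illustrative usage sketch (editorial addition, not part of the original sources):
+ //
+ //   import akka.shapeless.nat._
+ //   ToInt[_4].apply()   // 4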
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/ops/products.scala b/akka-parsing/src/main/scala/akka/shapeless/ops/products.scala
new file mode 100644
index 0000000000..520b79e20d
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/ops/products.scala
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package ops
+
+import hlist.Length
+
+object product {
+ trait ProductLength[T] extends DepFn1[T]
+
+ object ProductLength {
+ def apply[T](implicit length: ProductLength[T]): Aux[T, length.Out] = length
+
+ type Aux[T, Out0] = ProductLength[T] { type Out = Out0 }
+
+ implicit def length[T, L <: HList](implicit gen: Generic.Aux[T, L], length: Length[L]): Aux[T, length.Out] =
+ new ProductLength[T] {
+ type Out = length.Out
+ def apply(t: T): Out = length()
+ }
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/ops/records.scala b/akka-parsing/src/main/scala/akka/shapeless/ops/records.scala
new file mode 100644
index 0000000000..0eedd7cb58
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/ops/records.scala
@@ -0,0 +1,223 @@
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package ops
+
+//object record {
+// Ideally this would be an object rather than a package, however that appears
+// to trip bugs in implicit resolution which manifest in the use of WitnessWith
+// in updateWith
+package record {
+ import akka.shapeless.record._
+
+ /**
+ * Type class supporting record field selection.
+ *
+ * @author Miles Sabin
+ */
+ @annotation.implicitNotFound(msg = "No field ${K} in record ${L}")
+ trait Selector[L <: HList, K] {
+ type Out
+ def apply(l: L): Out
+ }
+
+ trait LowPrioritySelector {
+ type Aux[L <: HList, K, Out0] = Selector[L, K] { type Out = Out0 }
+
+ implicit def hlistSelect[H, T <: HList, K](implicit st: Selector[T, K]): Aux[H :: T, K, st.Out] =
+ new Selector[H :: T, K] {
+ type Out = st.Out
+ def apply(l: H :: T): Out = st(l.tail)
+ }
+ }
+
+ object Selector extends LowPrioritySelector {
+ def apply[L <: HList, K](implicit selector: Selector[L, K]): Aux[L, K, selector.Out] = selector
+
+ implicit def hlistSelect1[K, V, T <: HList]: Aux[FieldType[K, V] :: T, K, V] =
+ new Selector[FieldType[K, V]:: T, K] {
+ type Out = V
+ def apply(l: FieldType[K, V] :: T): Out = l.head
+ }
+ }
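+
+ // Illustrative usage sketch (editorial addition, not part of the original sources;
+ // the key traits `Name` and `Age` are hypothetical and exist only for this example):
+ //
+ //   trait Name; trait Age
+ //   val rec = field[Name]("Jane") :: field[Age](42) :: HNil
+ //   Selector[FieldType[Name, String] :: FieldType[Age, Int] :: HNil, Age].apply(rec)   // 42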
+
+ /**
+ * Type class supporting record update and extension.
+ *
+ * @author Miles Sabin
+ */
+ trait Updater[L <: HList, F] extends DepFn2[L, F] { type Out <: HList }
+
+ trait LowPriorityUpdater {
+ type Aux[L <: HList, F, Out0 <: HList] = Updater[L, F] { type Out = Out0 }
+
+ implicit def hlistUpdater1[H, T <: HList, K, V](implicit ut: Updater[T, FieldType[K, V]]): Aux[H :: T, FieldType[K, V], H :: ut.Out] =
+ new Updater[H :: T, FieldType[K, V]] {
+ type Out = H :: ut.Out
+ def apply(l: H :: T, f: FieldType[K, V]): Out = l.head :: ut(l.tail, f)
+ }
+ }
+
+ object Updater extends LowPriorityUpdater {
+ def apply[L <: HList, F](implicit updater: Updater[L, F]): Aux[L, F, updater.Out] = updater
+
+ implicit def hnilUpdater[L <: HNil, F]: Aux[L, F, F :: HNil] =
+ new Updater[L, F] {
+ type Out = F :: HNil
+ def apply(l: L, f: F): Out = f :: HNil
+ }
+
+ implicit def hlistUpdater2[K, V, T <: HList]: Aux[FieldType[K, V] :: T, FieldType[K, V], FieldType[K, V] :: T] =
+ new Updater[FieldType[K, V]:: T, FieldType[K, V]] {
+ type Out = FieldType[K, V] :: T
+ def apply(l: FieldType[K, V] :: T, f: FieldType[K, V]): Out = f :: l.tail
+ }
+ }
+
+ /**
+ * Type class supporting modification of a record field by a given function.
+ *
+ * @author Joni Freeman
+ */
+ @annotation.implicitNotFound(msg = "No field ${F} with value of type ${A} in record ${L}")
+ trait Modifier[L <: HList, F, A, B] extends DepFn2[L, A ⇒ B] { type Out <: HList }
+
+ object Modifier {
+ def apply[L <: HList, F, A, B](implicit modifier: Modifier[L, F, A, B]): Aux[L, F, A, B, modifier.Out] = modifier
+
+ type Aux[L <: HList, F, A, B, Out0 <: HList] = Modifier[L, F, A, B] { type Out = Out0 }
+
+ implicit def hlistModify1[F, A, B, T <: HList]: Aux[FieldType[F, A] :: T, F, A, B, FieldType[F, B] :: T] =
+ new Modifier[FieldType[F, A]:: T, F, A, B] {
+ type Out = FieldType[F, B] :: T
+ def apply(l: FieldType[F, A] :: T, f: A ⇒ B): Out = field[F](f(l.head)) :: l.tail
+ }
+
+ implicit def hlistModify[H, T <: HList, F, A, B](implicit mt: Modifier[T, F, A, B]): Aux[H :: T, F, A, B, H :: mt.Out] =
+ new Modifier[H :: T, F, A, B] {
+ type Out = H :: mt.Out
+ def apply(l: H :: T, f: A ⇒ B): Out = l.head :: mt(l.tail, f)
+ }
+ }
+
+ /**
+ * Type class supporting record field removal.
+ *
+ * @author Miles Sabin
+ */
+ @annotation.implicitNotFound(msg = "No field ${K} in record ${L}")
+ trait Remover[L <: HList, K] extends DepFn1[L]
+
+ trait LowPriorityRemover {
+ type Aux[L <: HList, K, Out0] = Remover[L, K] { type Out = Out0 }
+
+ implicit def hlistRemove[H, T <: HList, K, V, OutT <: HList](implicit rt: Aux[T, K, (V, OutT)]): Aux[H :: T, K, (V, H :: OutT)] =
+ new Remover[H :: T, K] {
+ type Out = (V, H :: OutT)
+ def apply(l: H :: T): Out = {
+ val (v, tail) = rt(l.tail)
+ (v, l.head :: tail)
+ }
+ }
+ }
+
+ object Remover extends LowPriorityRemover {
+ def apply[L <: HList, K](implicit remover: Remover[L, K]): Aux[L, K, remover.Out] = remover
+
+ implicit def hlistRemove1[K, V, T <: HList]: Aux[FieldType[K, V] :: T, K, (V, T)] =
+ new Remover[FieldType[K, V]:: T, K] {
+ type Out = (V, T)
+ def apply(l: FieldType[K, V] :: T): Out = (l.head, l.tail)
+ }
+ }
+
+ /**
+ * Type class supporting renaming of a record field.
+ *
+ * @author Joni Freeman
+ */
+ @annotation.implicitNotFound(msg = "No field ${K1} in record ${L}")
+ trait Renamer[L <: HList, K1, K2] extends DepFn1[L] { type Out <: HList }
+
+ object Renamer {
+ def apply[L <: HList, K1, K2](implicit renamer: Renamer[L, K1, K2]): Aux[L, K1, K2, renamer.Out] = renamer
+
+ type Aux[L <: HList, K1, K2, Out0 <: HList] = Renamer[L, K1, K2] { type Out = Out0 }
+
+ implicit def hlistRenamer1[T <: HList, K1, K2, V]: Aux[FieldType[K1, V] :: T, K1, K2, FieldType[K2, V] :: T] =
+ new Renamer[FieldType[K1, V]:: T, K1, K2] {
+ type Out = FieldType[K2, V] :: T
+ def apply(l: FieldType[K1, V] :: T): Out = field[K2](l.head: V) :: l.tail
+ }
+
+ implicit def hlistRenamer[H, T <: HList, K1, K2, V](implicit rn: Renamer[T, K1, K2]): Aux[H :: T, K1, K2, H :: rn.Out] =
+ new Renamer[H :: T, K1, K2] {
+ type Out = H :: rn.Out
+ def apply(l: H :: T): Out = l.head :: rn(l.tail)
+ }
+ }
+
+ /**
+ * Type class supporting collecting the keys of a record as an `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait Keys[L <: HList] extends DepFn0 { type Out <: HList }
+
+ object Keys {
+ def apply[L <: HList](implicit keys: Keys[L]): Aux[L, keys.Out] = keys
+
+ type Aux[L <: HList, Out0 <: HList] = Keys[L] { type Out = Out0 }
+
+ implicit def hnilKeys[L <: HNil]: Aux[L, HNil] =
+ new Keys[L] {
+ type Out = HNil
+ def apply(): Out = HNil
+ }
+
+ implicit def hlistKeys[K, V, T <: HList](implicit wk: Witness.Aux[K], kt: Keys[T]): Aux[FieldType[K, V] :: T, K :: kt.Out] =
+ new Keys[FieldType[K, V]:: T] {
+ type Out = K :: kt.Out
+ def apply(): Out = wk.value :: kt()
+ }
+ }
+
+ /**
+ * Type class supporting collecting the values of a record as an `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait Values[L <: HList] extends DepFn1[L] { type Out <: HList }
+
+ object Values {
+ def apply[L <: HList](implicit values: Values[L]): Aux[L, values.Out] = values
+
+ type Aux[L <: HList, Out0 <: HList] = Values[L] { type Out = Out0 }
+
+ implicit def hnilValues[L <: HNil]: Aux[L, HNil] =
+ new Values[L] {
+ type Out = HNil
+ def apply(l: L): Out = HNil
+ }
+
+ implicit def hlistValues[K, V, T <: HList](implicit vt: Values[T]): Aux[FieldType[K, V] :: T, V :: vt.Out] =
+ new Values[FieldType[K, V]:: T] {
+ type Out = V :: vt.Out
+ def apply(l: FieldType[K, V] :: T): Out = (l.head: V) :: vt(l.tail)
+ }
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/ops/traversables.scala b/akka-parsing/src/main/scala/akka/shapeless/ops/traversables.scala
new file mode 100644
index 0000000000..09d1e908c7
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/ops/traversables.scala
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package ops
+
+import scala.collection.{ GenTraversable, GenTraversableLike }
+
+object traversable {
+ /**
+ * Type class supporting type safe conversion of `Traversables` to `HLists`.
+ *
+ * @author Miles Sabin
+ */
+ trait FromTraversable[Out <: HList] {
+ def apply(l: GenTraversable[_]): Option[Out]
+ }
+
+ /**
+ * `FromTraversable` type class instances.
+ *
+ * @author Miles Sabin
+ */
+ object FromTraversable {
+ def apply[Out <: HList](implicit from: FromTraversable[Out]) = from
+
+ import syntax.typeable._
+
+ implicit def hnilFromTraversable[T] = new FromTraversable[HNil] {
+ def apply(l: GenTraversable[_]) =
+ if (l.isEmpty) Some(HNil) else None
+ }
+
+ implicit def hlistFromTraversable[OutH, OutT <: HList](implicit flt: FromTraversable[OutT], oc: Typeable[OutH]) = new FromTraversable[OutH :: OutT] {
+ def apply(l: GenTraversable[_]): Option[OutH :: OutT] =
+ if (l.isEmpty) None
+ else for (h ← l.head.cast[OutH]; t ← flt(l.tail)) yield h :: t
+ }
+ }
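+
+ // Illustrative usage sketch (editorial addition, not part of the original sources):
+ //
+ //   FromTraversable[Int :: String :: HNil].apply(List(1, "a"))   // Some(1 :: "a" :: HNil)
+ //   FromTraversable[Int :: String :: HNil].apply(List(1))        // None (too few elements)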
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/ops/tuples.scala b/akka-parsing/src/main/scala/akka/shapeless/ops/tuples.scala
new file mode 100644
index 0000000000..25afd0a175
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/ops/tuples.scala
@@ -0,0 +1,804 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package ops
+
+object tuple {
+ import akka.shapeless.ops.{ hlist ⇒ hl }
+
+ /**
+ * Type class witnessing that this tuple is composite and providing access to head and tail.
+ *
+ * @author Miles Sabin
+ */
+ trait IsComposite[P] {
+ type H
+ type T
+
+ def head(p: P): H
+ def tail(p: P): T
+ }
+
+ object IsComposite {
+ def apply[P](implicit isComp: IsComposite[P]): Aux[P, isComp.H, isComp.T] = isComp
+
+ type Aux[P, H0, T0] = IsComposite[P] { type H = H0; type T = T0 }
+
+ implicit def isComposite[P, L <: HList, H0, T <: HList](implicit gen: Generic.Aux[P, L], isHCons: hl.IsHCons.Aux[L, H0, T], tp: hl.Tupler[T]): Aux[P, H0, tp.Out] =
+ new IsComposite[P] {
+ type H = H0
+ type T = tp.Out
+ def head(p: P): H = isHCons.head(gen.to(p))
+ def tail(p: P): T = tp(isHCons.tail(gen.to(p)))
+ }
+ }
+
+ /**
+ * Type class supporting prepending to this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait Prepend[T, U] extends DepFn2[T, U]
+
+ object Prepend {
+ def apply[T, U](implicit prepend: Prepend[T, U]): Aux[T, U, prepend.Out] = prepend
+
+ type Aux[T, U, Out0] = Prepend[T, U] { type Out = Out0 }
+
+ implicit def prepend[T, L1 <: HList, U, L2 <: HList, L3 <: HList](implicit gent: Generic.Aux[T, L1], genu: Generic.Aux[U, L2], prepend: hl.Prepend.Aux[L1, L2, L3], tp: hl.Tupler[L3]): Aux[T, U, tp.Out] =
+ new Prepend[T, U] {
+ type Out = tp.Out
+ def apply(t: T, u: U): Out = prepend(gent.to(t), genu.to(u)).tupled
+ }
+ }
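+
+ // Illustrative usage sketch (editorial addition, not part of the original sources):
+ //
+ //   Prepend[(Int, String), (Boolean, Double)].apply((1, "a"), (true, 2.0))
+ //   // yields (1, "a", true, 2.0)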
+
+ /**
+ * Type class supporting reverse prepending to this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait ReversePrepend[T, U] extends DepFn2[T, U]
+
+ object ReversePrepend {
+ def apply[T, U](implicit prepend: ReversePrepend[T, U]): Aux[T, U, prepend.Out] = prepend
+
+ type Aux[T, U, Out0] = ReversePrepend[T, U] { type Out = Out0 }
+
+ implicit def prepend[T, L1 <: HList, U, L2 <: HList, L3 <: HList](implicit gent: Generic.Aux[T, L1], genu: Generic.Aux[U, L2], prepend: hl.ReversePrepend.Aux[L1, L2, L3], tp: hl.Tupler[L3]): Aux[T, U, tp.Out] =
+ new ReversePrepend[T, U] {
+ type Out = tp.Out
+ def apply(t: T, u: U): Out = prepend(gent.to(t), genu.to(u)).tupled
+ }
+ }
+
+ /**
+ * Type class supporting access to the ''nth'' element of this tuple. Available only if this tuple has at least
+ * ''n'' elements.
+ *
+ * @author Miles Sabin
+ */
+ trait At[T, N <: Nat] extends DepFn1[T]
+
+ object At {
+ def apply[T, N <: Nat](implicit at: At[T, N]): Aux[T, N, at.Out] = at
+
+ type Aux[T, N <: Nat, Out0] = At[T, N] { type Out = Out0 }
+
+ implicit def at[T, L1 <: HList, N <: Nat](implicit gen: Generic.Aux[T, L1], at: hl.At[L1, N]): Aux[T, N, at.Out] =
+ new At[T, N] {
+ type Out = at.Out
+ def apply(t: T): Out = at(gen.to(t))
+ }
+ }
+
+ /**
+ * Type class supporting access to the last element of this tuple. Available only if this tuple has at least one
+ * element.
+ *
+ * @author Miles Sabin
+ */
+ trait Last[T] extends DepFn1[T]
+
+ object Last {
+ def apply[T](implicit last: Last[T]): Aux[T, last.Out] = last
+
+ type Aux[T, Out0] = Last[T] { type Out = Out0 }
+
+ implicit def last[T, L <: HList](implicit gen: Generic.Aux[T, L], last: hl.Last[L]): Aux[T, last.Out] =
+ new Last[T] {
+ type Out = last.Out
+ def apply(t: T): Out = gen.to(t).last
+ }
+ }
+
+ /**
+ * Type class supporting access to all but the last element of this tuple. Available only if this tuple has at
+ * least one element.
+ *
+ * @author Miles Sabin
+ */
+ trait Init[T] extends DepFn1[T]
+
+ object Init {
+ def apply[T](implicit init: Init[T]): Aux[T, init.Out] = init
+
+ type Aux[T, Out0] = Init[T] { type Out = Out0 }
+
+ implicit def init[T, L1 <: HList, L2 <: HList](implicit gen: Generic.Aux[T, L1], init: hl.Init.Aux[L1, L2], tp: hl.Tupler[L2]): Aux[T, tp.Out] =
+ new Init[T] {
+ type Out = tp.Out
+ def apply(t: T): Out = init(gen.to(t)).tupled
+ }
+ }
+
+ /**
+ * Type class supporting access to the first element of this tuple of type `U`. Available only if this tuple
+ * contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait Selector[T, U] extends DepFn1[T] { type Out = U }
+
+ object Selector {
+ def apply[T, U](implicit selector: Selector[T, U]): Aux[T, U] = selector
+
+ type Aux[T, U] = Selector[T, U]
+
+ implicit def select[T, L <: HList, U](implicit gen: Generic.Aux[T, L], selector: hl.Selector[L, U]): Aux[T, U] =
+ new Selector[T, U] {
+ def apply(t: T): U = gen.to(t).select[U]
+ }
+ }
+
+ /**
+ * Type class supporting access to all elements of this tuple of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait Filter[T, U] extends DepFn1[T]
+
+ object Filter {
+ def apply[T, U](implicit filter: Filter[T, U]): Aux[T, U, filter.Out] = filter
+
+ type Aux[T, U, Out0] = Filter[T, U] { type Out = Out0 }
+
+ implicit def filterTuple[T, L1 <: HList, U, L2 <: HList](implicit gen: Generic.Aux[T, L1], filter: hl.Filter.Aux[L1, U, L2], tp: hl.Tupler[L2]): Aux[T, U, tp.Out] = new Filter[T, U] {
+ type Out = tp.Out
+ def apply(t: T): Out = tp(filter(gen.to(t)))
+ }
+ }
+
+ /**
+ * Type class supporting access to all elements of this tuple whose type differs from `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait FilterNot[T, U] extends DepFn1[T]
+
+ object FilterNot {
+ def apply[T, U](implicit filter: FilterNot[T, U]): Aux[T, U, filter.Out] = filter
+
+ type Aux[T, U, Out0] = FilterNot[T, U] { type Out = Out0 }
+
+ implicit def filterNotTuple[T, L1 <: HList, U, L2 <: HList](implicit gen: Generic.Aux[T, L1], filterNot: hl.FilterNot.Aux[L1, U, L2], tp: hl.Tupler[L2]): Aux[T, U, tp.Out] = new FilterNot[T, U] {
+ type Out = tp.Out
+ def apply(t: T): Out = tp(filterNot(gen.to(t)))
+ }
+ }
+
+ /**
+ * Type class supporting removal of an element from this tuple. Available only if this tuple contains an
+ * element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait Remove[T, U] extends DepFn1[T]
+
+ object Remove {
+ def apply[T, E](implicit remove: Remove[T, E]): Aux[T, E, remove.Out] = remove
+
+ type Aux[T, U, Out0] = Remove[T, U] { type Out = Out0 }
+
+ implicit def removeTuple[T, L1 <: HList, U, L2 <: HList](implicit gen: Generic.Aux[T, L1], remove: hl.Remove.Aux[L1, U, (U, L2)], tp: hl.Tupler[L2]): Aux[T, U, (U, tp.Out)] = new Remove[T, U] {
+ type Out = (U, tp.Out)
+ def apply(t: T): Out = { val (u, rem) = remove(gen.to(t)); (u, tp(rem)) }
+ }
+ }
+
+ /**
+ * Type class supporting removal of a sublist from this tuple. Available only if this tuple contains a
+ * sublist of type `S`.
+ *
+ * The elements of `S` do not have to be contiguous in this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait RemoveAll[T, S] extends DepFn1[T]
+
+ object RemoveAll {
+ def apply[T, S](implicit remove: RemoveAll[T, S]): Aux[T, S, remove.Out] = remove
+
+ type Aux[T, S, Out0] = RemoveAll[T, S] { type Out = Out0 }
+
+ implicit def removeAllTuple[T, ST, SL <: HList, L1 <: HList, L2 <: HList](implicit gent: Generic.Aux[T, L1], gens: Generic.Aux[ST, SL], removeAll: hl.RemoveAll.Aux[L1, SL, (SL, L2)], tp: hl.Tupler[L2]): Aux[T, ST, (ST, tp.Out)] =
+ new RemoveAll[T, ST] {
+ type Out = (ST, tp.Out)
+ def apply(t: T): Out = { val (e, rem) = removeAll(gent.to(t)); (gens.from(e), tp(rem)) }
+ }
+ }
+
+ /**
+ * Type class supporting replacement of the first element of type `U` in this tuple with an element of type `V`.
+ * Available only if this tuple contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait Replacer[T, U, V] extends DepFn2[T, U]
+
+ object Replacer {
+ def apply[T, U, V](implicit replacer: Replacer[T, U, V]): Aux[T, U, V, replacer.Out] = replacer
+
+ type Aux[T, U, V, Out0] = Replacer[T, U, V] { type Out = Out0 }
+
+ implicit def replaceTuple[T, L1 <: HList, U, V, L2 <: HList](implicit gen: Generic.Aux[T, L1], replace: hl.Replacer.Aux[L1, V, U, (V, L2)], tp: hl.Tupler[L2]): Aux[T, U, V, (V, tp.Out)] = new Replacer[T, U, V] {
+ type Out = (V, tp.Out)
+ def apply(t: T, u: U): Out = { val (v, rep) = replace(gen.to(t), u); (v, tp(rep)) }
+ }
+ }
+
+ /**
+ * Type class supporting replacement of the Nth element of this tuple with an element of type V. Available only if
+ * this tuple contains at least N elements.
+ *
+ * @author Miles Sabin
+ */
+ trait ReplaceAt[T, N <: Nat, U] extends DepFn2[T, U]
+
+ object ReplaceAt {
+ def apply[T, N <: Nat, V](implicit replacer: ReplaceAt[T, N, V]): Aux[T, N, V, replacer.Out] = replacer
+
+ type Aux[T, N <: Nat, U, Out0] = ReplaceAt[T, N, U] { type Out = Out0 }
+
+ implicit def replaceTuple[T, L1 <: HList, N <: Nat, U, V, L2 <: HList](implicit gen: Generic.Aux[T, L1], replaceAt: hl.ReplaceAt.Aux[L1, N, U, (V, L2)], tp: hl.Tupler[L2]): Aux[T, N, U, (V, tp.Out)] = new ReplaceAt[T, N, U] {
+ type Out = (V, tp.Out)
+ def apply(t: T, u: U): Out = { val (v, rep) = replaceAt(gen.to(t), u); (v, tp(rep)) }
+ }
+ }
+
+ /**
+ * Type class supporting retrieval of the first ''n'' elements of this tuple. Available only if this tuple has at
+ * least ''n'' elements.
+ *
+ * @author Miles Sabin
+ */
+ trait Take[T, N <: Nat] extends DepFn1[T]
+
+ object Take {
+ def apply[T, N <: Nat](implicit take: Take[T, N]): Aux[T, N, take.Out] = take
+
+ type Aux[T, N <: Nat, Out0] = Take[T, N] { type Out = Out0 }
+
+ implicit def tupleTake[T, L1 <: HList, N <: Nat, L2 <: HList](implicit gen: Generic.Aux[T, L1], take: hl.Take.Aux[L1, N, L2], tp: hl.Tupler[L2]): Aux[T, N, tp.Out] =
+ new Take[T, N] {
+ type Out = tp.Out
+ def apply(t: T): tp.Out = tp(take(gen.to(t)))
+ }
+ }
+
+ /**
+ * Type class supporting removal of the first ''n'' elements of this tuple. Available only if this tuple has at
+ * least ''n'' elements.
+ *
+ * @author Miles Sabin
+ */
+ trait Drop[T, N <: Nat] extends DepFn1[T]
+
+ object Drop {
+ def apply[T, N <: Nat](implicit drop: Drop[T, N]): Aux[T, N, drop.Out] = drop
+
+ type Aux[T, N <: Nat, Out0] = Drop[T, N] { type Out = Out0 }
+
+ implicit def tupleDrop[T, L1 <: HList, N <: Nat, L2 <: HList](implicit gen: Generic.Aux[T, L1], drop: hl.Drop.Aux[L1, N, L2], tp: hl.Tupler[L2]): Aux[T, N, tp.Out] =
+ new Drop[T, N] {
+ type Out = tp.Out
+ def apply(t: T): tp.Out = tp(drop(gen.to(t)))
+ }
+ }
+
+ /**
+ * Type class supporting splitting this tuple at the ''nth'' element returning the prefix and suffix as a pair.
+ * Available only if this tuple has at least ''n'' elements.
+ *
+ * @author Miles Sabin
+ */
+ trait Split[T, N <: Nat] extends DepFn1[T]
+
+ object Split {
+ def apply[T, N <: Nat](implicit split: Split[T, N]): Aux[T, N, split.Out] = split
+
+ type Aux[T, N <: Nat, Out0] = Split[T, N] { type Out = Out0 }
+
+ implicit def tupleSplit[T, L <: HList, N <: Nat, LP <: HList, LS <: HList](implicit gen: Generic.Aux[T, L], split: hl.Split.Aux[L, N, (LP, LS)], tpp: hl.Tupler[LP], tps: hl.Tupler[LS]): Aux[T, N, (tpp.Out, tps.Out)] =
+ new Split[T, N] {
+ type Out = (tpp.Out, tps.Out)
+ def apply(t: T): Out = { val (p, s) = split(gen.to(t)); (tpp(p), tps(s)) }
+ }
+ }
+
+ /**
+ * Type class supporting splitting this tuple at the ''nth'' element returning the reverse prefix and suffix as a
+ * pair. Available only if this tuple has at least ''n'' elements.
+ *
+ * @author Miles Sabin
+ */
+ trait ReverseSplit[T, N <: Nat] extends DepFn1[T]
+
+ object ReverseSplit {
+ def apply[T, N <: Nat](implicit split: ReverseSplit[T, N]): Aux[T, N, split.Out] = split
+
+ type Aux[T, N <: Nat, Out0] = ReverseSplit[T, N] { type Out = Out0 }
+
+ implicit def tupleReverseSplit[T, L <: HList, N <: Nat, LP <: HList, LS <: HList](implicit gen: Generic.Aux[T, L], split: hl.ReverseSplit.Aux[L, N, (LP, LS)], tpp: hl.Tupler[LP], tps: hl.Tupler[LS]): Aux[T, N, (tpp.Out, tps.Out)] =
+ new ReverseSplit[T, N] {
+ type Out = (tpp.Out, tps.Out)
+ def apply(t: T): Out = { val (p, s) = split(gen.to(t)); (tpp(p), tps(s)) }
+ }
+ }
+
+ /**
+ * Type class supporting splitting this tuple at the first occurrence of an element of type `U` returning the prefix
+ * and suffix as a pair. Available only if this tuple contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait SplitLeft[T, U] extends DepFn1[T]
+
+ object SplitLeft {
+ def apply[T, U](implicit split: SplitLeft[T, U]): Aux[T, U, split.Out] = split
+
+ type Aux[T, U, Out0] = SplitLeft[T, U] { type Out = Out0 }
+
+ implicit def tupleSplitLeft[T, L <: HList, U, LP <: HList, LS <: HList](implicit gen: Generic.Aux[T, L], split: hl.SplitLeft.Aux[L, U, (LP, LS)], tpp: hl.Tupler[LP], tps: hl.Tupler[LS]): Aux[T, U, (tpp.Out, tps.Out)] =
+ new SplitLeft[T, U] {
+ type Out = (tpp.Out, tps.Out)
+ def apply(t: T): Out = { val (p, s) = split(gen.to(t)); (tpp(p), tps(s)) }
+ }
+ }
+
+ /**
+ * Type class supporting splitting this tuple at the first occurrence of an element of type `U` returning the reverse
+ * prefix and suffix as a pair. Available only if this tuple contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait ReverseSplitLeft[T, U] extends DepFn1[T]
+
+ object ReverseSplitLeft {
+ def apply[T, U](implicit split: ReverseSplitLeft[T, U]): Aux[T, U, split.Out] = split
+
+ type Aux[T, U, Out0] = ReverseSplitLeft[T, U] { type Out = Out0 }
+
+ implicit def tupleReverseSplitLeft[T, L <: HList, U, LP <: HList, LS <: HList](implicit gen: Generic.Aux[T, L], split: hl.ReverseSplitLeft.Aux[L, U, (LP, LS)], tpp: hl.Tupler[LP], tps: hl.Tupler[LS]): Aux[T, U, (tpp.Out, tps.Out)] =
+ new ReverseSplitLeft[T, U] {
+ type Out = (tpp.Out, tps.Out)
+ def apply(t: T): Out = { val (p, s) = split(gen.to(t)); (tpp(p), tps(s)) }
+ }
+ }
+
+ /**
+ * Type class supporting splitting this tuple at the last occurrence of an element of type `U` returning the prefix
+ * and suffix as a pair. Available only if this tuple contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait SplitRight[T, U] extends DepFn1[T]
+
+ object SplitRight {
+ def apply[T, U](implicit split: SplitRight[T, U]): Aux[T, U, split.Out] = split
+
+ type Aux[T, U, Out0] = SplitRight[T, U] { type Out = Out0 }
+
+ implicit def tupleSplitRight[T, L <: HList, U, LP <: HList, LS <: HList](implicit gen: Generic.Aux[T, L], split: hl.SplitRight.Aux[L, U, (LP, LS)], tpp: hl.Tupler[LP], tps: hl.Tupler[LS]): Aux[T, U, (tpp.Out, tps.Out)] =
+ new SplitRight[T, U] {
+ type Out = (tpp.Out, tps.Out)
+ def apply(t: T): Out = { val (p, s) = split(gen.to(t)); (tpp(p), tps(s)) }
+ }
+ }
+
+ /**
+ * Type class supporting splitting this tuple at the last occurrence of an element of type `U` returning the reverse
+ * prefix and suffix as a pair. Available only if this tuple contains an element of type `U`.
+ *
+ * @author Miles Sabin
+ */
+ trait ReverseSplitRight[T, U] extends DepFn1[T]
+
+ object ReverseSplitRight {
+ def apply[T, U](implicit split: ReverseSplitRight[T, U]): Aux[T, U, split.Out] = split
+
+ type Aux[T, U, Out0] = ReverseSplitRight[T, U] { type Out = Out0 }
+
+ implicit def tupleReverseSplitRight[T, L <: HList, U, LP <: HList, LS <: HList](implicit gen: Generic.Aux[T, L], split: hl.ReverseSplitRight.Aux[L, U, (LP, LS)], tpp: hl.Tupler[LP], tps: hl.Tupler[LS]): Aux[T, U, (tpp.Out, tps.Out)] =
+ new ReverseSplitRight[T, U] {
+ type Out = (tpp.Out, tps.Out)
+ def apply(t: T): Out = { val (p, s) = split(gen.to(t)); (tpp(p), tps(s)) }
+ }
+ }
+
+ /**
+ * Type class supporting reversing this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait Reverse[T] extends DepFn1[T]
+
+ object Reverse {
+ def apply[T](implicit reverse: Reverse[T]): Aux[T, reverse.Out] = reverse
+
+ type Aux[T, Out0] = Reverse[T] { type Out = Out0 }
+
+ implicit def tupleReverseAux[T, L1 <: HList, L2 <: HList, Out](implicit gen: Generic.Aux[T, L1], reverse: hl.Reverse.Aux[L1, L2], tp: hl.Tupler[L2]): Aux[T, tp.Out] =
+ new Reverse[T] {
+ type Out = tp.Out
+ def apply(t: T): tp.Out = tp(reverse(gen.to(t)))
+ }
+ }
+
+ /**
+ * Type class supporting mapping a higher ranked function over this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait Mapper[T, P] extends DepFn1[T]
+
+ object Mapper {
+ def apply[T, P](implicit mapper: Mapper[T, P]): Aux[T, P, mapper.Out] = mapper
+
+ type Aux[T, P, Out0] = Mapper[T, P] { type Out = Out0 }
+
+ implicit def mapper[T, P, L1 <: HList, L2 <: HList](implicit gen: Generic.Aux[T, L1], mapper: hl.Mapper.Aux[P, L1, L2], tp: hl.Tupler[L2]): Aux[T, P, tp.Out] =
+ new Mapper[T, P] {
+ type Out = tp.Out
+ def apply(t: T): tp.Out = tp(mapper(gen.to(t)))
+ }
+ }
+
+ /**
+ * Type class supporting flatmapping a higher ranked function over this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait FlatMapper[T, P] extends DepFn1[T]
+
+ object FlatMapper {
+ def apply[T, P](implicit mapper: FlatMapper[T, P]): Aux[T, P, mapper.Out] = mapper
+
+ import poly.Compose
+
+ type Aux[T, P, Out0] = FlatMapper[T, P] { type Out = Out0 }
+
+ implicit def mapper[T, P, L1 <: HList, L2 <: HList](implicit gen: Generic.Aux[T, L1], mapper: hl.FlatMapper.Aux[Compose[productElements.type, P], L1, L2], tp: hl.Tupler[L2]): Aux[T, P, tp.Out] =
+ new FlatMapper[T, P] {
+ type Out = tp.Out
+ def apply(t: T): tp.Out = tp(mapper(gen.to(t)))
+ }
+ }
+
+ /**
+ * Type class supporting mapping a constant valued function over this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait ConstMapper[T, C] extends DepFn2[T, C]
+
+ object ConstMapper {
+ def apply[T, C](implicit mapper: ConstMapper[T, C]): Aux[T, C, mapper.Out] = mapper
+
+ type Aux[T, C, Out0] = ConstMapper[T, C] { type Out = Out0 }
+
+ implicit def mapper[T, C, L1 <: HList, L2 <: HList](implicit gen: Generic.Aux[T, L1], mapper: hl.ConstMapper.Aux[C, L1, L2], tp: hl.Tupler[L2]): Aux[T, C, tp.Out] =
+ new ConstMapper[T, C] {
+ type Out = tp.Out
+ def apply(t: T, c: C): tp.Out = tp(mapper(c, gen.to(t)))
+ }
+ }
+
+ /**
+ * Type class supporting mapping a polymorphic function over this tuple and then folding the result using a
+ * monomorphic function value.
+ *
+ * @author Miles Sabin
+ */
+ trait MapFolder[T, R, P] { // Nb. Not a dependent function signature
+ def apply(t: T, in: R, op: (R, R) ⇒ R): R
+ }
+
+ object MapFolder {
+ def apply[T, R, P](implicit folder: MapFolder[T, R, P]) = folder
+
+ implicit def mapper[T, L <: HList, R, P](implicit gen: Generic.Aux[T, L], mapper: hl.MapFolder[L, R, P]): MapFolder[T, R, P] =
+ new MapFolder[T, R, P] {
+ def apply(t: T, in: R, op: (R, R) ⇒ R): R = mapper(gen.to(t), in, op)
+ }
+ }
+
+ /**
+ * Type class supporting left-folding a polymorphic binary function over this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait LeftFolder[T, U, P] extends DepFn2[T, U]
+
+ object LeftFolder {
+ def apply[T, U, P](implicit folder: LeftFolder[T, U, P]): Aux[T, U, P, folder.Out] = folder
+
+ type Aux[T, U, P, Out0] = LeftFolder[T, U, P] { type Out = Out0 }
+
+ implicit def folder[T, L <: HList, U, P](implicit gen: Generic.Aux[T, L], folder: hl.LeftFolder[L, U, P]): Aux[T, U, P, folder.Out] =
+ new LeftFolder[T, U, P] {
+ type Out = folder.Out
+ def apply(t: T, u: U): Out = folder(gen.to(t), u)
+ }
+ }
+
+ /**
+ * Type class supporting right-folding a polymorphic binary function over this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait RightFolder[T, U, P] extends DepFn2[T, U]
+
+ object RightFolder {
+ def apply[T, U, P](implicit folder: RightFolder[T, U, P]): Aux[T, U, P, folder.Out] = folder
+
+ type Aux[T, U, P, Out0] = RightFolder[T, U, P] { type Out = Out0 }
+
+ implicit def folder[T, L <: HList, U, P](implicit gen: Generic.Aux[T, L], folder: hl.RightFolder[L, U, P]): Aux[T, U, P, folder.Out] =
+ new RightFolder[T, U, P] {
+ type Out = folder.Out
+ def apply(t: T, u: U): Out = folder(gen.to(t), u)
+ }
+ }
+
+ /**
+ * Type class supporting left-reducing a polymorphic binary function over this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait LeftReducer[T, P] extends DepFn1[T]
+
+ object LeftReducer {
+ def apply[T, P](implicit reducer: LeftReducer[T, P]): Aux[T, P, reducer.Out] = reducer
+
+ type Aux[T, P, Out0] = LeftReducer[T, P] { type Out = Out0 }
+
+ implicit def folder[T, L <: HList, P](implicit gen: Generic.Aux[T, L], folder: hl.LeftReducer[L, P]): Aux[T, P, folder.Out] =
+ new LeftReducer[T, P] {
+ type Out = folder.Out
+ def apply(t: T): Out = folder(gen.to(t))
+ }
+ }
+
+ /**
+ * Type class supporting right-reducing a polymorphic binary function over this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait RightReducer[T, P] extends DepFn1[T]
+
+ object RightReducer {
+ def apply[T, P](implicit reducer: RightReducer[T, P]): Aux[T, P, reducer.Out] = reducer
+
+ type Aux[T, P, Out0] = RightReducer[T, P] { type Out = Out0 }
+
+ implicit def folder[T, L <: HList, P](implicit gen: Generic.Aux[T, L], folder: hl.RightReducer[L, P]): Aux[T, P, folder.Out] =
+ new RightReducer[T, P] {
+ type Out = folder.Out
+ def apply(t: T): Out = folder(gen.to(t))
+ }
+ }
+
+ /**
+ * Type class supporting transposing this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait Transposer[T] extends DepFn1[T]
+
+ object Transposer {
+ def apply[T](implicit transposer: Transposer[T]): Aux[T, transposer.Out] = transposer
+
+ type Aux[T, Out0] = Transposer[T] { type Out = Out0 }
+
+ implicit def transpose[T, L1 <: HList, L2 <: HList, L3 <: HList, L4 <: HList](implicit gen: Generic.Aux[T, L1],
+ mpe: hl.Mapper.Aux[productElements.type, L1, L2],
+ tps: hl.Transposer.Aux[L2, L3],
+ mtp: hl.Mapper.Aux[tupled.type, L3, L4],
+ tp: hl.Tupler[L4]): Aux[T, tp.Out] =
+ new Transposer[T] {
+ type Out = tp.Out
+ def apply(t: T): Out = ((gen.to(t) map productElements).transpose map tupled).tupled
+ }
+ }
+
+ /**
+ * Type class supporting zipping this tuple of monomorphic function values with its argument tuple of
+ * correspondingly typed function arguments returning the result of each application as a tuple. Available only if
+ * there is evidence that the corresponding function and argument elements have compatible types.
+ *
+ * @author Miles Sabin
+ */
+ trait ZipApply[FT, AT] extends DepFn2[FT, AT]
+
+ object ZipApply {
+ def apply[FT, AT](implicit zip: ZipApply[FT, AT]): Aux[FT, AT, zip.Out] = zip
+
+ type Aux[FT, AT, Out0] = ZipApply[FT, AT] { type Out = Out0 }
+
+ implicit def zipApply[FT, FL <: HList, AT, AL <: HList, RL <: HList](implicit genf: Generic.Aux[FT, FL],
+ gena: Generic.Aux[AT, AL],
+ zip: hl.ZipApply.Aux[FL, AL, RL],
+ tp: hl.Tupler[RL]): Aux[FT, AT, tp.Out] =
+ new ZipApply[FT, AT] {
+ type Out = tp.Out
+ def apply(ft: FT, at: AT): Out = (genf.to(ft) zipApply gena.to(at)).tupled
+ }
+ }
+
+ /**
+ * Type class supporting zipping this tuple with a tuple of tuples returning a tuple of tuples with each
+ * element of this tuple prepended to the corresponding tuple element of the argument tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait ZipOne[H, T] extends DepFn2[H, T]
+
+ object ZipOne {
+ def apply[H, T](implicit zip: ZipOne[H, T]): Aux[H, T, zip.Out] = zip
+
+ type Aux[H, T, Out0] = ZipOne[H, T] { type Out = Out0 }
+
+ implicit def zipOne[HT, HL <: HList, TT, TL <: HList, TLL <: HList, RLL <: HList, RL <: HList](implicit genh: Generic.Aux[HT, HL],
+ gent: Generic.Aux[TT, TL],
+ mpet: hl.Mapper.Aux[productElements.type, TL, TLL],
+ zone: hl.ZipOne.Aux[HL, TLL, RLL],
+ mtp: hl.Mapper.Aux[tupled.type, RLL, RL],
+ tp: hl.Tupler[RL]): Aux[HT, TT, tp.Out] =
+ new ZipOne[HT, TT] {
+ type Out = tp.Out
+ def apply(h: HT, t: TT): Out = ((genh.to(h) zipOne (gent.to(t) map productElements)) map tupled).tupled
+ }
+ }
+
+ /**
+ * Type class supporting zipping a tuple with a constant, resulting in a tuple of tuples of the form
+ * ({element from input tuple}, {supplied constant})
+ *
+ * @author Miles Sabin
+ */
+ trait ZipConst[T, C] extends DepFn2[T, C]
+
+ object ZipConst {
+ def apply[T, C](implicit zip: ZipConst[T, C]): Aux[T, C, zip.Out] = zip
+
+ type Aux[T, C, Out0] = ZipConst[T, C] { type Out = Out0 }
+
+ implicit def zipConst[T, C, L1 <: HList, L2 <: HList](implicit gen: Generic.Aux[T, L1], zipper: hl.ZipConst.Aux[C, L1, L2], tp: hl.Tupler[L2]): Aux[T, C, tp.Out] =
+ new ZipConst[T, C] {
+ type Out = tp.Out
+ def apply(t: T, c: C): tp.Out = tp(zipper(c, gen.to(t)))
+ }
+ }
+
+ /**
+ * Type class supporting unification of this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait Unifier[T] extends DepFn1[T]
+
+ object Unifier {
+ def apply[T](implicit unifier: Unifier[T]): Aux[T, unifier.Out] = unifier
+
+ type Aux[T, Out0] = Unifier[T] { type Out = Out0 }
+
+ implicit def unifier[T, L1 <: HList, L2 <: HList](implicit gen: Generic.Aux[T, L1], unifier: hl.Unifier.Aux[L1, L2], tp: hl.Tupler[L2]): Aux[T, tp.Out] =
+ new Unifier[T] {
+ type Out = tp.Out
+ def apply(t: T): Out = unifier(gen.to(t)).tupled
+ }
+ }
+
+ /**
+ * Type class supporting unification of all elements that are subtypes of `B` in this tuple to `B`, with all other
+ * elements left unchanged.
+ *
+ * @author Miles Sabin
+ */
+ trait SubtypeUnifier[T, B] extends DepFn1[T]
+
+ object SubtypeUnifier {
+ def apply[T, B](implicit unifier: SubtypeUnifier[T, B]): Aux[T, B, unifier.Out] = unifier
+
+ type Aux[T, B, Out0] = SubtypeUnifier[T, B] { type Out = Out0 }
+
+ implicit def subtypeUnifier[T, B, L1 <: HList, L2 <: HList](implicit gen: Generic.Aux[T, L1], unifier: hl.SubtypeUnifier.Aux[L1, B, L2], tp: hl.Tupler[L2]): Aux[T, B, tp.Out] =
+ new SubtypeUnifier[T, B] {
+ type Out = tp.Out
+ def apply(t: T): Out = unifier(gen.to(t)).tupled
+ }
+ }
+
+ /**
+ * Type class supporting computing the type-level Nat corresponding to the length of this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait Length[T] extends DepFn1[T]
+
+ object Length {
+ def apply[T](implicit length: Length[T]): Aux[T, length.Out] = length
+
+ type Aux[T, Out0] = Length[T] { type Out = Out0 }
+
+ implicit def length[T, L <: HList](implicit gen: Generic.Aux[T, L], length: hl.Length[L]): Aux[T, length.Out] =
+ new Length[T] {
+ type Out = length.Out
+ def apply(t: T): Out = length()
+ }
+ }
+
+ /**
+ * Type class supporting conversion of this tuple to a `List` with elements typed as the least upper bound
+ * of the types of the elements of this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait ToList[T, Lub] extends DepFn1[T]
+
+ object ToList {
+ def apply[T, Lub](implicit toList: ToList[T, Lub]) = toList
+
+ type Aux[T, Lub, Out0] = ToList[T, Lub] { type Out = Out0 }
+
+ implicit def toList[T, L <: HList, Lub](implicit gen: Generic.Aux[T, L], toList: hl.ToList[L, Lub]): Aux[T, Lub, List[Lub]] =
+ new ToList[T, Lub] {
+ type Out = List[Lub]
+ def apply(t: T): Out = gen.to(t).toList[Lub]
+ }
+ }
+
+ /**
+ * Type class supporting conversion of this tuple to an `Array` with elements typed as the least upper bound
+ * of the types of the elements of this tuple.
+ *
+ * @author Miles Sabin
+ */
+ trait ToArray[T, Lub] extends DepFn1[T]
+
+ object ToArray {
+ def apply[T, Lub](implicit toArray: ToArray[T, Lub]) = toArray
+
+ type Aux[T, Lub, Out0] = ToArray[T, Lub] { type Out = Out0 }
+
+ implicit def toArray[T, L <: HList, Lub](implicit gen: Generic.Aux[T, L], toArray: hl.ToArray[L, Lub]): Aux[T, Lub, Array[Lub]] =
+ new ToArray[T, Lub] {
+ type Out = Array[Lub]
+ def apply(t: T): Out = gen.to(t).toArray[Lub]
+ }
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/ops/unions.scala b/akka-parsing/src/main/scala/akka/shapeless/ops/unions.scala
new file mode 100644
index 0000000000..f6489afa69
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/ops/unions.scala
@@ -0,0 +1,113 @@
+/*
+ * Copyright (c) 2014 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package ops
+
+object union {
+ import akka.shapeless.record.FieldType
+
+ /**
+ * Type class supporting union member selection.
+ *
+ * @author Miles Sabin
+ */
+ @annotation.implicitNotFound(msg = "No field ${K} in union ${C}")
+ trait Selector[C <: Coproduct, K] {
+ type V
+ type Out = Option[V]
+ def apply(l: C): Out
+ }
+
+ trait LowPrioritySelector {
+ type Aux[C <: Coproduct, K, V0] = Selector[C, K] { type V = V0 }
+
+ implicit def tlSelector[H, T <: Coproduct, K](implicit st: Selector[T, K]): Aux[H :+: T, K, st.V] =
+ new Selector[H :+: T, K] {
+ type V = st.V
+ def apply(u: H :+: T): Out = u match {
+ case Inl(l) ⇒ None
+ case Inr(r) ⇒ if (st == null) None else st(r)
+ }
+ }
+ }
+
+ object Selector extends LowPrioritySelector {
+ def apply[C <: Coproduct, K](implicit selector: Selector[C, K]): Aux[C, K, selector.V] = selector
+
+ implicit def hdSelector[K, V0, T <: Coproduct]: Aux[FieldType[K, V0] :+: T, K, V0] =
+ new Selector[FieldType[K, V0]:+: T, K] {
+ type V = V0
+ def apply(u: FieldType[K, V] :+: T): Out = u match {
+ case Inl(l) ⇒ Some(l)
+ case Inr(r) ⇒ None
+ }
+ }
+ }
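+ // Illustrative usage sketch (assumption; K1 and K2 stand for arbitrary singleton key types):
+ // for a union type
+ //   type U = FieldType[K1, Int] :+: FieldType[K2, String] :+: CNil
+ // Selector[U, K1] selects by key, yielding an Option of the corresponding value type:
+ //   Selector[U, K1].apply(u): Option[Int]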
+
+ /**
+ * Type class supporting collecting the keys of a union as an `HList`.
+ *
+ * @author Miles Sabin
+ */
+ trait Keys[U <: Coproduct] extends DepFn0 { type Out <: HList }
+
+ object Keys {
+ def apply[U <: Coproduct](implicit keys: Keys[U]): Aux[U, keys.Out] = keys
+
+ type Aux[U <: Coproduct, Out0 <: HList] = Keys[U] { type Out = Out0 }
+
+ implicit def cnilKeys[U <: CNil]: Aux[U, HNil] =
+ new Keys[U] {
+ type Out = HNil
+ def apply(): Out = HNil
+ }
+
+ implicit def coproductKeys[K, V, T <: Coproduct](implicit wk: Witness.Aux[K], kt: Keys[T]): Aux[FieldType[K, V] :+: T, K :: kt.Out] =
+ new Keys[FieldType[K, V]:+: T] {
+ type Out = K :: kt.Out
+ def apply(): Out = wk.value :: kt()
+ }
+ }
+
+ /**
+ * Type class supporting collecting the values of a union as a `Coproduct`.
+ *
+ * @author Miles Sabin
+ */
+ trait Values[U <: Coproduct] extends DepFn1[U] { type Out <: Coproduct }
+
+ object Values {
+ def apply[U <: Coproduct](implicit values: Values[U]): Aux[U, values.Out] = values
+
+ type Aux[U <: Coproduct, Out0 <: Coproduct] = Values[U] { type Out = Out0 }
+
+ implicit def cnilValues[U <: CNil]: Aux[U, CNil] =
+ new Values[U] {
+ type Out = CNil
+ def apply(u: U): Out = u
+ }
+
+ implicit def coproductValues[K, V, T <: Coproduct](implicit vt: Values[T]): Aux[FieldType[K, V] :+: T, V :+: vt.Out] =
+ new Values[FieldType[K, V]:+: T] {
+ type Out = V :+: vt.Out
+ def apply(l: FieldType[K, V] :+: T): Out = l match {
+ case Inl(l) ⇒ Inl(l)
+ case Inr(r) ⇒ Inr(vt(r))
+ }
+ }
+ }
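+ // Illustrative sketch (assumption, continuing the hypothetical union U above):
+ //   Keys[U].apply()    yields the key witnesses as an HList
+ //   Values[U].apply(u) yields the values as a Coproduct, here Int :+: String :+: CNil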
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/ops/zipper.scala b/akka-parsing/src/main/scala/akka/shapeless/ops/zipper.scala
new file mode 100644
index 0000000000..398cb150e1
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/ops/zipper.scala
@@ -0,0 +1,204 @@
+/*
+ * Copyright (c) 2012-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package ops
+
+import hlist.{ IsHCons, ReversePrepend, Split, SplitLeft }
+
+object zipper {
+ trait Right[Z] extends DepFn1[Z]
+
+ object Right {
+ implicit def right[C, L <: HList, RH, RT <: HList, P] = new Right[Zipper[C, L, RH :: RT, P]] {
+ type Out = Zipper[C, RH :: L, RT, P]
+ def apply(z: Zipper[C, L, RH :: RT, P]) = Zipper(z.suffix.head :: z.prefix, z.suffix.tail, z.parent)
+ }
+ }
+
+ trait Left[Z] extends DepFn1[Z]
+
+ object Left {
+ implicit def left[C, LH, LT <: HList, R <: HList, P] = new Left[Zipper[C, LH :: LT, R, P]] {
+ type Out = Zipper[C, LT, LH :: R, P]
+ def apply(z: Zipper[C, LH :: LT, R, P]) = Zipper(z.prefix.tail, z.prefix.head :: z.suffix, z.parent)
+ }
+ }
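+ // Illustrative sketch (assumption; the Zipper carrier with prefix/suffix/parent fields is
+ // defined elsewhere): moving right shifts the suffix head onto the reversed prefix, e.g. a
+ // zipper with prefix HNil and suffix 1 :: "foo" :: HNil becomes, after one step right,
+ // prefix 1 :: HNil and suffix "foo" :: HNil; moving left is the inverse.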
+
+ trait First[Z] extends DepFn1[Z]
+
+ object First {
+ implicit def first[C, L <: HList, R <: HList, RP <: HList, P](implicit rp: ReversePrepend.Aux[L, R, RP]) =
+ new First[Zipper[C, L, R, P]] {
+ type Out = Zipper[C, HNil, RP, P]
+ def apply(z: Zipper[C, L, R, P]) = Zipper(HNil, z.prefix reverse_::: z.suffix, z.parent)
+ }
+ }
+
+ trait Last[Z] extends DepFn1[Z]
+
+ object Last {
+ implicit def last[C, L <: HList, R <: HList, RP <: HList, P](implicit rp: ReversePrepend.Aux[R, L, RP]) =
+ new Last[Zipper[C, L, R, P]] {
+ type Out = Zipper[C, RP, HNil, P]
+ def apply(z: Zipper[C, L, R, P]) = Zipper(z.suffix reverse_::: z.prefix, HNil, z.parent)
+ }
+ }
+
+ trait RightBy[Z, N <: Nat] extends DepFn1[Z]
+
+ object RightBy {
+ implicit def rightBy[C, L <: HList, R <: HList, P, N <: Nat, LP <: HList, RS <: HList](implicit split: Split.Aux[R, N, (LP, RS)], reverse: ReversePrepend[LP, L]) =
+ new RightBy[Zipper[C, L, R, P], N] {
+ type Out = Zipper[C, reverse.Out, RS, P]
+ def apply(z: Zipper[C, L, R, P]) = {
+ val (p, s) = z.suffix.split[N]
+ Zipper(p reverse_::: z.prefix, s, z.parent)
+ }
+ }
+ }
+
+ trait LeftBy[Z, N <: Nat] extends DepFn1[Z]
+
+ object LeftBy {
+ implicit def leftBy[C, L <: HList, R <: HList, P, N <: Nat, RP <: HList, LS <: HList](implicit split: Split.Aux[L, N, (RP, LS)], reverse: ReversePrepend[RP, R]) =
+ new LeftBy[Zipper[C, L, R, P], N] {
+ type Out = Zipper[C, LS, reverse.Out, P]
+ def apply(z: Zipper[C, L, R, P]) = {
+ val (p, s) = z.prefix.split[N]
+ Zipper(s, p reverse_::: z.suffix, z.parent)
+ }
+ }
+ }
+
+ trait RightTo[Z, T] extends DepFn1[Z]
+
+ object RightTo {
+ implicit def rightTo[C, L <: HList, R <: HList, P, T, LP <: HList, RS <: HList](implicit split: SplitLeft.Aux[R, T, (LP, RS)], reverse: ReversePrepend[LP, L]) =
+ new RightTo[Zipper[C, L, R, P], T] {
+ type Out = Zipper[C, reverse.Out, RS, P]
+ def apply(z: Zipper[C, L, R, P]) = {
+ val (p, s) = z.suffix.splitLeft[T]
+ Zipper(p reverse_::: z.prefix, s, z.parent)
+ }
+ }
+ }
+
+ trait LeftTo[Z, T] extends DepFn1[Z]
+
+ object LeftTo {
+ implicit def leftTo[C, L <: HList, R <: HList, P, T, RP <: HList, R0 <: HList](implicit split: SplitLeft.Aux[L, T, (RP, R0)], reverse: ReversePrepend[RP, R], cons: IsHCons[R0]) =
+ new LeftTo[Zipper[C, L, R, P], T] {
+ type Out = Zipper[C, cons.T, cons.H :: reverse.Out, P]
+ def apply(z: Zipper[C, L, R, P]) = {
+ val (p, s) = z.prefix.splitLeft[T]
+ Zipper(s.tail, s.head :: (p reverse_::: z.suffix), z.parent)
+ }
+ }
+ }
+
+ trait Up[Z] extends DepFn1[Z]
+
+ object Up {
+ implicit def up[C, L <: HList, R <: HList, P](implicit rz: Reify[Zipper[C, L, R, Some[P]]] { type Out = C }, pp: Put[P, C]) =
+ new Up[Zipper[C, L, R, Some[P]]] {
+ type Out = pp.Out
+ def apply(z: Zipper[C, L, R, Some[P]]) = pp(z.parent.get, z.reify)
+ }
+ }
+
+ trait Down[Z] extends DepFn1[Z]
+
+ object Down {
+ implicit def down[C, L <: HList, RH, RT <: HList, P, RHL <: HList](implicit gen: Generic.Aux[RH, RHL]) =
+ new Down[Zipper[C, L, RH :: RT, P]] {
+ type Out = Zipper[RH, HNil, RHL, Some[Zipper[C, L, RH :: RT, P]]]
+ def apply(z: Zipper[C, L, RH :: RT, P]) = Zipper(HNil, gen.to(z.suffix.head), Some(z))
+ }
+ }
+
+ trait Root[Z] extends DepFn1[Z]
+
+ object Root extends {
+ implicit def rootRoot[C, L <: HList, R <: HList] = new Root[Zipper[C, L, R, None.type]] {
+ type Out = Zipper[C, L, R, None.type]
+ def apply(z: Zipper[C, L, R, None.type]) = z
+ }
+
+ implicit def nonRootRoot[C, L <: HList, R <: HList, P, U](implicit up: Up[Zipper[C, L, R, Some[P]]] { type Out = U }, pr: Root[U]) =
+ new Root[Zipper[C, L, R, Some[P]]] {
+ type Out = pr.Out
+ def apply(z: Zipper[C, L, R, Some[P]]) = pr(z.up)
+ }
+ }
+
+ trait Get[Z] extends DepFn1[Z]
+
+ object Get {
+ implicit def get[C, L <: HList, RH, RT <: HList, P] = new Get[Zipper[C, L, RH :: RT, P]] {
+ type Out = RH
+ def apply(z: Zipper[C, L, RH :: RT, P]) = z.suffix.head
+ }
+ }
+
+ trait Put[Z, E] extends DepFn2[Z, E]
+
+ trait LowPriorityPut {
+ implicit def put[C, L <: HList, RH, RT <: HList, P, E, CL <: HList](implicit gen: Generic.Aux[C, CL], rp: ReversePrepend.Aux[L, E :: RT, CL]) =
+ new Put[Zipper[C, L, RH :: RT, P], E] {
+ type Out = Zipper[C, L, E :: RT, P]
+ def apply(z: Zipper[C, L, RH :: RT, P], e: E) = Zipper(z.prefix, e :: z.suffix.tail, z.parent)
+ }
+ }
+
+ object Put extends LowPriorityPut {
+ implicit def hlistPut[C <: HList, L <: HList, RH, RT <: HList, P, E, CL <: HList](implicit rp: ReversePrepend.Aux[L, E :: RT, CL]) =
+ new Put[Zipper[C, L, RH :: RT, P], E] {
+ type Out = Zipper[CL, L, E :: RT, P]
+ def apply(z: Zipper[C, L, RH :: RT, P], e: E) = Zipper(z.prefix, e :: z.suffix.tail, z.parent)
+ }
+ }
+
+ trait Insert[Z, E] extends DepFn2[Z, E]
+
+ object Insert {
+ implicit def hlistInsert[C <: HList, L <: HList, R <: HList, P, E, CL <: HList](implicit rp: ReversePrepend.Aux[E :: L, R, CL]) =
+ new Insert[Zipper[C, L, R, P], E] {
+ type Out = Zipper[CL, E :: L, R, P]
+ def apply(z: Zipper[C, L, R, P], e: E) = Zipper(e :: z.prefix, z.suffix, z.parent)
+ }
+ }
+
+ trait Delete[Z] extends DepFn1[Z]
+
+ object Delete {
+ implicit def hlistDelete[C <: HList, L <: HList, RH, RT <: HList, P, CL <: HList](implicit rp: ReversePrepend.Aux[L, RT, CL]) =
+ new Delete[Zipper[C, L, RH :: RT, P]] {
+ type Out = Zipper[CL, L, RT, P]
+ def apply(z: Zipper[C, L, RH :: RT, P]) = Zipper(z.prefix, z.suffix.tail, z.parent)
+ }
+ }
+
+ trait Reify[Z] extends DepFn1[Z]
+
+ object Reify {
+ implicit def reify[C, L <: HList, R <: HList, P, CL <: HList](implicit gen: Generic.Aux[C, CL], rp: ReversePrepend.Aux[L, R, CL]) =
+ new Reify[Zipper[C, L, R, P]] {
+ type Out = C
+ def apply(z: Zipper[C, L, R, P]) = gen.from(z.prefix reverse_::: z.suffix)
+ }
+ }
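+ // Illustrative sketch (assumption; Foo is a hypothetical case class): reify reassembles the
+ // focussed value from the zipper context via its Generic representation, so an unmodified
+ // zipper over Foo(1, "a") reifies back to Foo(1, "a").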
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/package.scala b/akka-parsing/src/main/scala/akka/shapeless/package.scala
new file mode 100644
index 0000000000..48c63d6d10
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/package.scala
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2013-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka
+
+package object shapeless {
+ def unexpected: Nothing = sys.error("Unexpected invocation")
+
+ // Basic definitions
+ type Id[+T] = T
+ type Const[C] = {
+ type λ[T] = C
+ }
+
+ type ¬[T] = T ⇒ Nothing
+ type ¬¬[T] = ¬[¬[T]]
+ type ∧[T, U] = T with U
+ type ∨[T, U] = ¬[¬[T] ∧ ¬[U]]
+
+ // Type-lambda for context bound
+ type |∨|[T, U] = {
+ type λ[X] = ¬¬[X] <:< (T ∨ U)
+ }
+
+ // Type inequalities
+ trait =:!=[A, B]
+
+ implicit def neq[A, B]: A =:!= B = new =:!=[A, B] {}
+ implicit def neqAmbig1[A]: A =:!= A = unexpected
+ implicit def neqAmbig2[A]: A =:!= A = unexpected
+
+ trait <:!<[A, B]
+
+ implicit def nsub[A, B]: A <:!< B = new <:!<[A, B] {}
+ implicit def nsubAmbig1[A, B >: A]: A <:!< B = unexpected
+ implicit def nsubAmbig2[A, B >: A]: A <:!< B = unexpected
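+ // Illustrative note (assumption): the ambiguous `neqAmbig`/`nsubAmbig` pairs make resolution
+ // fail exactly when the negated relation holds, e.g.
+ //   implicitly[Int =:!= String]   // compiles
+ //   implicitly[Int =:!= Int]      // fails with ambiguous implicits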
+
+ // Type-lambda for context bound
+ type |¬|[T] = {
+ type λ[U] = U <:!< T
+ }
+
+ // Quantifiers
+ type ∃[P[_]] = P[T] forSome { type T }
+ type ∀[P[_]] = ¬[∃[({ type λ[X] = ¬[P[X]] })#λ]]
+
+ /** `Lens` definitions */
+ val lens = LensDefns
+
+ /** `Nat` literals */
+ val nat = Nat
+
+ /** `Poly` definitions */
+ val poly = PolyDefns
+ import poly._
+
+ /** Dependent nullary function type. */
+ trait DepFn0 {
+ type Out
+ def apply(): Out
+ }
+
+ /** Dependent unary function type. */
+ trait DepFn1[T] {
+ type Out
+ def apply(t: T): Out
+ }
+
+ /** Dependent binary function type. */
+ trait DepFn2[T, U] {
+ type Out
+ def apply(t: T, u: U): Out
+ }
+
+ /** The SYB everything combinator */
+ type Everything[F <: Poly, K <: Poly, T] = Case1[EverythingAux[F, K], T]
+
+ class ApplyEverything[F <: Poly] {
+ def apply(k: Poly): EverythingAux[F, k.type] {} = new EverythingAux[F, k.type]
+ }
+
+ def everything(f: Poly): ApplyEverything[f.type] {} = new ApplyEverything[f.type]
+
+ /** The SYB everywhere combinator */
+ type Everywhere[F <: Poly, T] = Case1[EverywhereAux[F], T]
+
+ def everywhere(f: Poly): EverywhereAux[f.type] {} = new EverywhereAux[f.type]
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/poly.scala b/akka-parsing/src/main/scala/akka/shapeless/poly.scala
new file mode 100644
index 0000000000..d0512eabbc
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/poly.scala
@@ -0,0 +1,392 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import language.existentials
+import language.experimental.macros
+
+import reflect.macros.Context
+
+// Typically the contents of this object will be imported via val alias `poly` in the shapeless package object.
+object PolyDefns extends Cases {
+ /**
+ * Type-specific case of a polymorphic function.
+ *
+ * @author Miles Sabin
+ */
+ abstract class Case[P, L <: HList] {
+ type Result
+ val value: L ⇒ Result
+
+ def apply(t: L) = value(t)
+ def apply()(implicit ev: HNil =:= L) = value(HNil)
+ def apply[T](t: T)(implicit ev: (T :: HNil) =:= L) = value(t :: HNil)
+ def apply[T, U](t: T, u: U)(implicit ev: (T :: U :: HNil) =:= L) = value(t :: u :: HNil)
+ }
+
+ object Case extends CaseInst {
+ type Aux[P, L <: HList, Result0] = Case[P, L] { type Result = Result0 }
+ type Hom[P, T] = Aux[P, T :: HNil, T]
+
+ def apply[P, L <: HList, R](v: L ⇒ R): Aux[P, L, R] = new Case[P, L] {
+ type Result = R
+ val value = v
+ }
+
+ implicit def materializeFromValue1[P, F[_], T]: Case[P, F[T] :: HNil] = macro materializeFromValueImpl[P, F[T], T]
+ implicit def materializeFromValue2[P, T]: Case[P, T :: HNil] = macro materializeFromValueImpl[P, T, T]
+
+ def materializeFromValueImpl[P: c.WeakTypeTag, FT: c.WeakTypeTag, T: c.WeakTypeTag](c: Context): c.Expr[Case[P, FT :: HNil]] = {
+ import c.universe._
+
+ val pTpe = weakTypeOf[P]
+ val ftTpe = weakTypeOf[FT]
+ val tTpe = weakTypeOf[T]
+
+ val recTpe = weakTypeOf[Case[P, FT :: HNil]]
+ if (c.openImplicits.tail.exists(_._1 =:= recTpe))
+ c.abort(c.enclosingPosition, s"Diverging implicit expansion for Case.Aux[$pTpe, $ftTpe :: HNil]")
+
+ val value = pTpe match {
+ case SingleType(_, f) ⇒ f
+ case other ⇒ c.abort(c.enclosingPosition, "Can only materialize cases from singleton values")
+ }
+
+ c.Expr[Case[P, FT :: HNil]] {
+ TypeApply(Select(Ident(value), newTermName("caseUniv")), List(TypeTree(tTpe)))
+ }
+ }
+ }
+
+ type Case0[P] = Case[P, HNil]
+ object Case0 {
+ type Aux[P, T] = Case.Aux[P, HNil, T]
+ def apply[P, T](v: T): Aux[P, T] = new Case[P, HNil] {
+ type Result = T
+ val value = (l: HNil) ⇒ v
+ }
+ }
+
+ /**
+ * Represents the composition of two polymorphic function values.
+ *
+ * @author Miles Sabin
+ */
+ class Compose[F, G](f: F, g: G) extends Poly
+
+ object Compose {
+ implicit def composeCase[C, F <: Poly, G <: Poly, T, U, V](implicit unpack: Unpack2[C, Compose, F, G], cG: Case1.Aux[G, T, U], cF: Case1.Aux[F, U, V]) = new Case[C, T :: HNil] {
+ type Result = V
+ val value = (t: T :: HNil) ⇒ cF(cG.value(t))
+ }
+ }
+
+ /**
+ * Base class for lifting a `Function1` to a `Poly1`
+ */
+ class ->[T, R](f: T ⇒ R) extends Poly1 {
+ implicit def subT[U <: T] = at[U](f)
+ }
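+ // Illustrative usage sketch (assumption; requires these `poly` definitions in scope):
+ //   object inc extends ->((i: Int) ⇒ i + 1)
+ // gives a Poly1 with a case for Int (and its subtypes) derived from the plain function.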
+
+ trait LowPriorityLiftFunction1 extends Poly1 {
+ implicit def default[T] = at[T](_ ⇒ HNil: HNil)
+ }
+
+ /**
+ * Base class for lifting a `Function1` to a `Poly1` over the universal domain, yielding an `HList` with the result as
+ * its only element if the argument is in the original function's domain, `HNil` otherwise.
+ */
+ class >->[T, R](f: T ⇒ R) extends LowPriorityLiftFunction1 {
+ implicit def subT[U <: T] = at[U](f(_) :: HNil)
+ }
+
+ trait LowPriorityLiftU extends Poly {
+ implicit def default[L <: HList] = new ProductCase[L] {
+ type Result = HNil
+ val value = (l: L) ⇒ HNil
+ }
+ }
+
+ /**
+ * Base class for lifting a `Poly` to a `Poly` over the universal domain, yielding an `HList` with the result as its
+ * only element if the argument is in the original function's domain, `HNil` otherwise.
+ */
+ class LiftU[P <: Poly](p: P) extends LowPriorityLiftU {
+ implicit def defined[L <: HList](implicit caseT: Case[P, L]) = new ProductCase[L] {
+ type Result = caseT.Result :: HNil
+ val value = (l: L) ⇒ caseT(l) :: HNil
+ }
+ }
+
+ /**
+ * Base trait for natural transformations.
+ *
+ * @author Miles Sabin
+ */
+ trait ~>[F[_], G[_]] extends Poly1 {
+ def apply[T](f: F[T]): G[T]
+ implicit def caseUniv[T]: Case.Aux[F[T], G[T]] = at[F[T]](apply(_))
+ }
+
+ object ~> {
+ implicit def inst1[F[_], G[_], T](f: F ~> G): F[T] ⇒ G[T] = f(_)
+ implicit def inst2[G[_], T](f: Id ~> G): T ⇒ G[T] = f(_)
+ implicit def inst3[F[_], T](f: F ~> Id): F[T] ⇒ T = f(_)
+ implicit def inst4[T](f: Id ~> Id): T ⇒ T = f[T](_) // Explicit type argument needed here to prevent recursion?
+ implicit def inst5[F[_], G, T](f: F ~> Const[G]#λ): F[T] ⇒ G = f(_)
+ implicit def inst6[G, T](f: Id ~> Const[G]#λ): T ⇒ G = f(_)
+ implicit def inst7[F, G](f: Const[F]#λ ~> Const[G]#λ): F ⇒ G = f(_)
+ }
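+ // Illustrative usage sketch (assumption): a natural transformation is written as an object
+ // extending F ~> G, e.g.
+ //   object headOption extends (List ~> Option) { def apply[T](l: List[T]) = l.headOption }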
+
+ /** Natural transformation with a constant type constructor on the right hand side. */
+ type ~>>[F[_], R] = ~>[F, Const[R]#λ]
+
+ /** Polymorphic identity function. */
+ object identity extends (Id ~> Id) {
+ def apply[T](t: T) = t
+ }
+}
+
+/**
+ * Base trait for polymorphic values.
+ *
+ * @author Miles Sabin
+ */
+trait Poly extends PolyApply {
+ import poly._
+
+ def compose(f: Poly) = new Compose[this.type, f.type](this, f)
+
+ def andThen(f: Poly) = new Compose[f.type, this.type](f, this)
+
+ /** The type of the case representing this polymorphic function at argument types `L`. */
+ type ProductCase[L <: HList] = Case[this.type, L]
+ object ProductCase {
+ /** The type of a case of this polymorphic function of the form `L => R` */
+ type Aux[L <: HList, Result0] = ProductCase[L] { type Result = Result0 }
+
+ /** The type of a case of this polymorphic function of the form `T => T` */
+ type Hom[T] = Aux[T :: HNil, T]
+
+ def apply[L <: HList, R](v: L ⇒ R) = new ProductCase[L] {
+ type Result = R
+ val value = v
+ }
+ }
+
+ def use[T, L <: HList, R](t: T)(implicit cb: CaseBuilder[T, L, R]) = cb(t)
+
+ trait CaseBuilder[T, L <: HList, R] {
+ def apply(t: T): ProductCase.Aux[L, R]
+ }
+
+ trait LowPriorityCaseBuilder {
+ implicit def valueCaseBuilder[T]: CaseBuilder[T, HNil, T] =
+ new CaseBuilder[T, HNil, T] {
+ def apply(t: T) = ProductCase((_: HNil) ⇒ t)
+ }
+ }
+
+ object CaseBuilder extends LowPriorityCaseBuilder {
+ import ops.function.FnToProduct
+ implicit def fnCaseBuilder[F, H, T <: HList, Result](implicit fntp: FnToProduct.Aux[F, ((H :: T) ⇒ Result)]): CaseBuilder[F, H :: T, Result] =
+ new CaseBuilder[F, H :: T, Result] {
+ def apply(f: F) = ProductCase((l: H :: T) ⇒ fntp(f)(l))
+ }
+ }
+
+ def caseAt[L <: HList](implicit c: ProductCase[L]) = c
+
+ def apply[R](implicit c: ProductCase.Aux[HNil, R]): R = c()
+}
+
+/**
+ * Provides implicit conversions from polymorphic function values to monomorphic function values, e.g. for use as
+ * arguments to ordinary higher order functions.
+ *
+ * @author Miles Sabin
+ */
+object Poly extends PolyInst {
+ implicit def inst0(p: Poly)(implicit cse: p.ProductCase[HNil]): cse.Result = cse()
+
+ implicit def apply(f: Any): Poly = macro liftFnImpl
+
+ def liftFnImpl(c: Context)(f: c.Expr[Any]): c.Expr[Poly] = {
+ import c.universe._
+ import Flag._
+
+ val pendingSuperCall = Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())
+
+ val moduleName = newTermName(c.fresh)
+
+ val anySym = c.mirror.staticClass("scala.Any")
+ val anyTpe = anySym.asType.toType
+ val nothingSym = c.mirror.staticClass("scala.Nothing")
+ val nothingTpe = nothingSym.asType.toType
+
+ val typeOpsSym = c.mirror.staticPackage("shapeless")
+ val idSym = typeOpsSym.newTypeSymbol(newTypeName("Id"))
+ val constSym = typeOpsSym.newTypeSymbol(newTypeName("Const"))
+
+ val natTSym = c.mirror.staticClass("shapeless.PolyDefns.$tilde$greater")
+ val natTTpe = natTSym.asClass.toTypeConstructor
+
+ def mkApply(fSym: Symbol, gSym: Symbol, targ: TypeName, arg: TermName, body: Tree) = {
+ def mkTargRef(sym: Symbol) =
+ if (sym == idSym)
+ Ident(targ)
+ else if (sym.asType.typeParams.isEmpty)
+ Ident(sym.name)
+ else
+ AppliedTypeTree(Ident(sym.name), List(Ident(targ)))
+
+ DefDef(
+ Modifiers(), newTermName("apply"),
+ List(TypeDef(Modifiers(PARAM), targ, List(), TypeBoundsTree(TypeTree(nothingTpe), TypeTree(anyTpe)))),
+ List(List(ValDef(Modifiers(PARAM), arg, mkTargRef(fSym), EmptyTree))),
+ mkTargRef(gSym),
+ body)
+ }
+
+ def destructureMethod(methodSym: MethodSymbol) = {
+ val paramSym = methodSym.paramss match {
+ case List(List(ps)) ⇒ ps
+ case _ ⇒ c.abort(c.enclosingPosition, s"Expression $f has the wrong shape to be converted to a polymorphic function value")
+ }
+
+ def extractTc(tpe: Type): Symbol = {
+ val owner = tpe.typeSymbol.owner
+ if (owner == methodSym) idSym
+ else tpe.typeConstructor.typeSymbol
+ }
+
+ (extractTc(paramSym.typeSignature), extractTc(methodSym.returnType))
+ }
+
+ def stripSymbolsAndTypes(tree: Tree, internalSyms: List[Symbol]) = {
+ // Adapted from https://github.com/scala/async/blob/master/src/main/scala/scala/async/TransformUtils.scala#L226
+ final class StripSymbolsAndTypes extends Transformer {
+ override def transform(tree: Tree): Tree = super.transform {
+ tree match {
+ case TypeApply(fn, args) if args.map(t ⇒ transform(t)) exists (_.isEmpty) ⇒ transform(fn)
+ case EmptyTree ⇒ tree
+ case _ ⇒
+ val hasSymbol: Boolean = {
+ val reflectInternalTree = tree.asInstanceOf[symtab.Tree forSome { val symtab: reflect.internal.SymbolTable }]
+ reflectInternalTree.hasSymbol
+ }
+ val dupl = tree.duplicate
+ if (hasSymbol)
+ dupl.symbol = NoSymbol
+ dupl.tpe = null
+ dupl
+ }
+ }
+ }
+
+ (new StripSymbolsAndTypes).transform(tree)
+ }
+
+ val (fSym, gSym, dd) =
+ f.tree match {
+ case Block(List(), Function(List(_), Apply(TypeApply(fun, _), _))) ⇒
+ val methodSym = fun.symbol.asMethod
+
+ val (fSym1, gSym1) = destructureMethod(methodSym)
+ val body = Apply(TypeApply(Ident(methodSym), List(Ident(newTypeName("T")))), List(Ident(newTermName("t"))))
+
+ (fSym1, gSym1, mkApply(fSym1, gSym1, newTypeName("T"), newTermName("t"), body))
+
+ case Block(List(), Function(List(_), Apply(fun, _))) ⇒
+ val methodSym = fun.symbol.asMethod
+
+ val (fSym1, gSym1) = destructureMethod(methodSym)
+ val body = Apply(Ident(methodSym), List(Ident(newTermName("t"))))
+
+ (fSym1, gSym1, mkApply(fSym1, gSym1, newTypeName("T"), newTermName("t"), body))
+
+ case Block(List(df @ DefDef(mods, _, List(tp), List(List(vp)), tpt, rhs)), Literal(Constant(()))) ⇒
+ val methodSym = df.symbol.asMethod
+
+ val (fSym1, gSym1) = destructureMethod(methodSym)
+
+ val body = mkApply(fSym1, gSym1, tp.name, vp.name, stripSymbolsAndTypes(rhs, List()))
+
+ (fSym1, gSym1, body)
+
+ case Block(List(df @ DefDef(_, _, List(), List(List(vp)), tpt, rhs)), Literal(Constant(()))) ⇒
+ val methodSym = df.symbol.asMethod
+
+ val (fSym1, gSym1) = destructureMethod(methodSym)
+
+ val body = mkApply(fSym1, gSym1, newTypeName("T"), vp.name, stripSymbolsAndTypes(rhs, List()))
+ (fSym1, gSym1, body)
+
+ case _ ⇒
+ c.abort(c.enclosingPosition, s"Unable to convert expression $f to a polymorphic function value")
+ }
+
+ def mkTargTree(sym: Symbol) =
+ if (sym == idSym)
+ Select(Ident(newTermName("shapeless")), newTypeName("Id"))
+ else if (sym.asType.typeParams.isEmpty)
+ SelectFromTypeTree(
+ AppliedTypeTree(
+ Select(Ident(newTermName("shapeless")), newTypeName("Const")),
+ List(Ident(sym.name))),
+ newTypeName("λ"))
+ else
+ Ident(sym.name)
+
+ val liftedTypeTree =
+ AppliedTypeTree(
+ Ident(natTSym),
+ List(mkTargTree(fSym), mkTargTree(gSym)))
+
+ val moduleDef =
+ ModuleDef(Modifiers(), moduleName,
+ Template(
+ List(liftedTypeTree),
+ emptyValDef,
+ List(
+ DefDef(
+ Modifiers(), nme.CONSTRUCTOR, List(),
+ List(List()),
+ TypeTree(),
+ Block(List(pendingSuperCall), Literal(Constant(())))),
+
+ dd)))
+
+ c.Expr[Poly] {
+ Block(
+ List(moduleDef),
+ Ident(moduleName))
+ }
+ }
+}
+
+/**
+ * Trait simplifying the creation of polymorphic values.
+ */
+trait Poly0 extends Poly {
+ type Case0[T] = ProductCase.Aux[HNil, T]
+
+ def at[T](t: T) = new ProductCase[HNil] {
+ type Result = T
+ val value = (l: HNil) ⇒ t
+ }
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/records.scala b/akka-parsing/src/main/scala/akka/shapeless/records.scala
new file mode 100644
index 0000000000..671fb54c71
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/records.scala
@@ -0,0 +1,112 @@
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+/**
+ * Record operations on `HList`s with field-like elements.
+ *
+ * @author Miles Sabin
+ */
+object record {
+ import ops.hlist.Union
+ import ops.record.{ Keys, Values }
+ import syntax.RecordOps
+
+ implicit def recordOps[L <: HList](l: L): RecordOps[L] = new RecordOps(l)
+
+ /**
+ * The type of fields with keys of singleton type `K` and value type `V`.
+ */
+ type FieldType[K, V] = V with KeyTag[K, V]
+ trait KeyTag[K, V]
+
+ /**
+ * Yields a result encoding the supplied value with the singleton type `K` of its key.
+ */
+ def field[K] = new FieldBuilder[K]
+
+ class FieldBuilder[K] {
+ def apply[V](v: V): FieldType[K, V] = v.asInstanceOf[FieldType[K, V]]
+ }
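+ // Illustrative sketch (assumption; K stands for some singleton key type, e.g. the type of a
+ // Witness for a Symbol literal): field[K]("Miles") returns the String "Miles" tagged as
+ // FieldType[K, String], i.e. the value itself carries its key in its type.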
+
+ /**
+ * Utility trait intended for inferring a record type from a sample value and unpacking it into its
+ * key and value types.
+ */
+ trait RecordType {
+ type Record <: HList
+ type Union <: Coproduct
+ type Keys <: HList
+ type Values <: HList
+ }
+
+ object RecordType {
+ type Aux[L <: HList, C <: Coproduct, K, V] = RecordType {
+ type Record = L; type Union = C; type Keys = K; type Values = V
+ }
+
+ def apply[L <: HList](implicit union: Union[L], keys: Keys[L], values: Values[L]): Aux[L, union.Out, keys.Out, values.Out] =
+ new RecordType {
+ type Record = L
+ type Union = union.Out
+ type Keys = keys.Out
+ type Values = values.Out
+ }
+
+ def like[L <: HList](l: ⇒ L)(implicit union: Union[L], keys: Keys[L], values: Values[L]): Aux[L, union.Out, keys.Out, values.Out] =
+ new RecordType {
+ type Record = L
+ type Union = union.Out
+ type Keys = keys.Out
+ type Values = values.Out
+ }
+ }
+}
+
+/**
+ * Polymorphic function that allows modifications on record fields while preserving the
+ * original key types.
+ *
+ * @author Dario Rexin
+ */
+trait FieldPoly extends Poly1 {
+ import record._
+
+ class FieldCaseBuilder[A, T] {
+ def apply[Res](fn: A ⇒ Res) = new Case[FieldType[T, A]] {
+ type Result = FieldType[T, Res]
+ val value: Function1[A :: HNil, FieldType[T, Res]] =
+ (l: A :: HNil) ⇒ field[T](fn(l.head))
+ }
+ }
+
+ def atField[A](w: Witness) = new FieldCaseBuilder[A, w.T]
+}
+
+/**
+ * Field with values of type `V`.
+ *
+ * Record keys of this form should be objects which extend this trait. Keys may also be arbitrary singleton-typed
+ * values; however, keys of this form enforce the type of their values.
+ *
+ * @author Miles Sabin
+ */
+trait FieldOf[V] {
+ import record._
+
+ def ->>(v: V): FieldType[this.type, V] = field[this.type](v)
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/singletons.scala b/akka-parsing/src/main/scala/akka/shapeless/singletons.scala
new file mode 100644
index 0000000000..c7958367af
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/singletons.scala
@@ -0,0 +1,374 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import scala.language.existentials
+import scala.language.experimental.macros
+
+import scala.reflect.macros.Context
+
+import tag.@@
+
+trait Witness {
+ type T
+ val value: T {}
+}
+
+object Witness {
+ type Aux[T0] = Witness { type T = T0 }
+ type Lt[Lub] = Witness { type T <: Lub }
+
+ implicit def apply[T]: Witness.Aux[T] = macro SingletonTypeMacros.materializeImpl[T]
+
+ implicit def apply[T](t: T): Witness.Lt[T] = macro SingletonTypeMacros.convertImpl[T]
+
+ implicit val witness0: Witness.Aux[_0] =
+ new Witness {
+ type T = _0
+ val value = Nat._0
+ }
+
+ implicit def witnessN[P <: Nat]: Witness.Aux[Succ[P]] =
+ new Witness {
+ type T = Succ[P]
+ val value = new Succ[P]()
+ }
+}
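+// Illustrative usage sketch (assumption): Witness captures a value at the type level, e.g.
+//   val w = Witness(23)          // w.T is the singleton type of 23
+//   val n: w.T = w.value         // n == 23, statically typed with that singleton type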
+
+trait WitnessWith[TC[_]] extends Witness {
+ val instance: TC[T]
+ type Out
+}
+
+trait LowPriorityWitnessWith {
+ implicit def apply2[H, TC2[_ <: H, _], S <: H, T](t: T): WitnessWith.Lt[({ type λ[X] = TC2[S, X] })#λ, T] = macro SingletonTypeMacros.convertInstanceImpl2[H, TC2, S, T]
+}
+
+object WitnessWith extends LowPriorityWitnessWith {
+ type Aux[TC[_], T0] = WitnessWith[TC] { type T = T0 }
+ type Lt[TC[_], Lub] = WitnessWith[TC] { type T <: Lub }
+
+ implicit def apply1[TC[_], T](t: T): WitnessWith.Lt[TC, T] = macro SingletonTypeMacros.convertInstanceImpl1[TC, T]
+}
+
+trait SingletonTypeMacros[C <: Context] {
+ import syntax.SingletonOps
+ type SingletonOpsLt[Lub] = SingletonOps { type T <: Lub }
+
+ val c: C
+
+ import c.universe._
+ import Flag._
+
+ def mkWitnessT(sTpe: Type, s: Any): Tree =
+ mkWitness(TypeTree(sTpe), Literal(Constant(s)))
+
+ def mkWitness(sTpt: TypTree, s: Tree): Tree = {
+ val witnessTpt = Ident(typeOf[Witness].typeSymbol)
+ val T = TypeDef(Modifiers(), newTypeName("T"), List(), sTpt)
+ val value = ValDef(Modifiers(), newTermName("value"), sTpt, s)
+ mkImplClass(witnessTpt, List(T, value), List())
+ }
+
+ def materializeImpl(tpe: Type): Tree = {
+ val SymTpe = typeOf[scala.Symbol]
+ val TaggedSym = typeOf[tag.Tagged[_]].typeConstructor.typeSymbol
+
+ val ScalaName = newTermName("scala")
+ val SymName = newTermName("Symbol")
+ val ApplyName = newTermName("apply")
+
+ tpe.normalize match {
+ case t @ ConstantType(Constant(s)) ⇒ mkWitnessT(t, s)
+
+ case t @ SingleType(p, v) if !v.isParameter ⇒
+ mkWitness(TypeTree(t), TypeApply(Select(Ident(v), newTermName("asInstanceOf")), List(TypeTree(t))))
+
+ case t @ RefinedType(List(SymTpe, TypeRef(_, TaggedSym, List(ConstantType(const)))), _) ⇒
+ val tTpt = TypeTree(t)
+ val symTree = Apply(Select(Select(Ident(ScalaName), SymName), ApplyName), List(Literal(const)))
+ mkWitness(tTpt, mkTagged(tTpt, symTree))
+
+ case t ⇒
+ println(s"t: $t ${t.getClass.getName}")
+ c.abort(c.enclosingPosition, s"Type argument $t is not a singleton type")
+ }
+ }
+
+ def convertImpl(t: c.Expr[Any]): Tree = {
+ val SymTpe = typeOf[scala.Symbol]
+
+ val ScalaName = newTermName("scala")
+ val SymName = newTermName("Symbol")
+ val ApplyName = newTermName("apply")
+
+ (t.actualType, t.tree) match {
+ case (tpe @ ConstantType(const: Constant), _) ⇒
+ mkWitness(TypeTree(tpe), Literal(const))
+
+ case (tpe @ SingleType(p, v), tree) if !v.isParameter ⇒
+ mkWitness(TypeTree(tpe), tree)
+
+ case (tpe: TypeRef, Literal(const: Constant)) ⇒
+ mkWitness(TypeTree(ConstantType(const)), Literal(const))
+
+ case (SymTpe, Apply(Select(Select(Ident(ScalaName), SymName), ApplyName), List(Literal(const: Constant)))) ⇒
+ val atatTpe = typeOf[@@[_, _]].typeConstructor
+ val sTpt = TypeTree(appliedType(atatTpe, List(SymTpe, ConstantType(const))))
+ val sVal = mkTagged(sTpt, t.tree)
+ mkWitness(sTpt, sVal)
+
+ case _ ⇒
+ c.abort(c.enclosingPosition, s"Expression ${t.tree} does not evaluate to a constant or a stable value")
+ }
+ }
+
+ def mkWitnessWith(singletonInstanceTpt: TypTree, sTpt: TypTree, s: Tree, i: Tree): Tree = {
+ val iTpe =
+ (i.tpe match {
+ case NullaryMethodType(resTpe) ⇒ resTpe
+ case other ⇒ other
+ }).normalize
+
+ val iOut = iTpe.member(newTypeName("Out")) match {
+ case NoSymbol ⇒ definitions.NothingClass
+ case other ⇒ other
+ }
+
+ val niTpt = TypeTree(iTpe)
+
+ val T = TypeDef(Modifiers(), newTypeName("T"), List(), sTpt)
+ val value = ValDef(Modifiers(), newTermName("value"), sTpt, s)
+ val instance = ValDef(Modifiers(), newTermName("instance"), niTpt, i)
+ val Out = TypeDef(Modifiers(), newTypeName("Out"), List(), Ident(iOut))
+ mkImplClass(singletonInstanceTpt, List(T, value, instance, Out), List())
+ }
+
+ def convertInstanceImpl[TC[_]](t: c.Expr[Any])(implicit tcTag: c.WeakTypeTag[TC[_]]): Tree = {
+ val SymTpe = typeOf[scala.Symbol]
+
+ val ScalaName = newTermName("scala")
+ val SymName = newTermName("Symbol")
+ val ApplyName = newTermName("apply")
+
+ val tc = tcTag.tpe.typeConstructor
+ val siTpt =
+ AppliedTypeTree(
+ Select(Ident(newTermName("shapeless")), newTypeName("WitnessWith")),
+ List(TypeTree(tc)))
+
+ (t.actualType, t.tree) match {
+ case (tpe @ ConstantType(const: Constant), _) ⇒
+ val tci = appliedType(tc, List(tpe))
+ val i = c.inferImplicitValue(tci, silent = false)
+ mkWitnessWith(siTpt, TypeTree(tpe), Literal(const), i)
+
+ case (tpe @ SingleType(p, v), tree) if !v.isParameter ⇒
+ val tci = appliedType(tc, List(tpe))
+ val i = c.inferImplicitValue(tci, silent = false)
+ mkWitnessWith(siTpt, TypeTree(tpe), tree, i)
+
+ case (SymTpe, Apply(Select(Select(Ident(ScalaName), SymName), ApplyName), List(Literal(const: Constant)))) ⇒
+ val atatTpe = typeOf[@@[_, _]].typeConstructor
+ val tci = appliedType(tc, List(appliedType(atatTpe, List(SymTpe, ConstantType(const)))))
+ val sTpt = TypeTree(appliedType(atatTpe, List(SymTpe, ConstantType(const))))
+ val sVal = mkTagged(sTpt, t.tree)
+ val i = c.inferImplicitValue(tci, silent = false)
+ mkWitnessWith(siTpt, sTpt, sVal, i)
+
+ case (tpe: TypeRef, Literal(const: Constant)) ⇒
+ val tci = appliedType(tc, List(ConstantType(const)))
+ val i = c.inferImplicitValue(tci, silent = false)
+ mkWitnessWith(siTpt, TypeTree(ConstantType(const)), Literal(const), i)
+
+ case _ ⇒
+ c.abort(c.enclosingPosition, s"Expression ${t.tree} does not evaluate to a constant or a stable value")
+ }
+ }
+
+ def convertInstanceImpl2[H, TC2[_ <: H, _], S <: H](t: c.Expr[Any])(implicit tc2Tag: c.WeakTypeTag[TC2[_, _]], sTag: c.WeakTypeTag[S]): Tree = {
+ val SymTpe = typeOf[scala.Symbol]
+
+ val ScalaName = newTermName("scala")
+ val SymName = newTermName("Symbol")
+ val ApplyName = newTermName("apply")
+
+ val tc2 = tc2Tag.tpe.typeConstructor
+ val s = sTag.tpe
+
+ val pre = weakTypeOf[WitnessWith[({ type λ[X] = TC2[S, X] })#λ]]
+ val pre2 = pre.map {
+ _ match {
+ case TypeRef(prefix, sym, args) if sym.isFreeType ⇒
+ TypeRef(NoPrefix, tc2.typeSymbol, args)
+ case tpe ⇒ tpe
+ }
+ }
+ val tc = pre2.normalize
+
+ (t.actualType, t.tree) match {
+ case (tpe @ ConstantType(const: Constant), _) ⇒
+ val tci = appliedType(tc2, List(s, tpe))
+ val i = c.inferImplicitValue(tci, silent = false)
+ mkWitnessWith(TypeTree(tc), TypeTree(tpe), Literal(const), i)
+
+ case (tpe @ SingleType(p, v), tree) if !v.isParameter ⇒
+ val tci = appliedType(tc2, List(s, tpe))
+ val i = c.inferImplicitValue(tci, silent = false)
+ mkWitnessWith(TypeTree(tc), TypeTree(tpe), tree, i)
+
+ case (SymTpe, Apply(Select(Select(Ident(ScalaName), SymName), ApplyName), List(Literal(const: Constant)))) ⇒
+ val atatTpe = typeOf[@@[_, _]].typeConstructor
+ val tci = appliedType(tc2, List(s, appliedType(atatTpe, List(SymTpe, ConstantType(const)))))
+ val sTpt = TypeTree(appliedType(atatTpe, List(SymTpe, ConstantType(const))))
+ val sVal = mkTagged(sTpt, t.tree)
+ val i = c.inferImplicitValue(tci, silent = false)
+ mkWitnessWith(TypeTree(tc), sTpt, sVal, i)
+
+ case (tpe: TypeRef, Literal(const: Constant)) ⇒
+ val tci = appliedType(tc2, List(s, ConstantType(const)))
+ val i = c.inferImplicitValue(tci, silent = false)
+ mkWitnessWith(TypeTree(tc), TypeTree(ConstantType(const)), Literal(const), i)
+
+ case _ ⇒
+ c.abort(c.enclosingPosition, s"Expression ${t.tree} does not evaluate to a constant or a stable value")
+ }
+ }
+
+ def mkOps(sTpt: TypTree, w: Tree): c.Expr[SingletonOps] = {
+ val opsTpt = Ident(typeOf[SingletonOps].typeSymbol)
+ val T = TypeDef(Modifiers(), newTypeName("T"), List(), sTpt)
+ val value = ValDef(Modifiers(), newTermName("witness"), TypeTree(), w)
+ c.Expr[SingletonOps] {
+ mkImplClass(opsTpt, List(T, value), List())
+ }
+ }
+
+ def mkTagged(tpt: Tree, t: Tree): Tree =
+ TypeApply(Select(t, newTermName("asInstanceOf")), List(tpt))
+
+ def mkSingletonOps(t: c.Expr[Any]): c.Expr[SingletonOps] = {
+ val SymTpe = typeOf[scala.Symbol]
+
+ val ScalaName = newTermName("scala")
+ val SymName = newTermName("Symbol")
+ val ApplyName = newTermName("apply")
+
+ (t.actualType, t.tree) match {
+ case (tpe @ ConstantType(const: Constant), _) ⇒
+ val sTpt = TypeTree(tpe)
+ mkOps(sTpt, mkWitness(sTpt, Literal(const)))
+
+ case (tpe @ SingleType(p, v), tree) if !v.isParameter ⇒
+ val sTpt = TypeTree(tpe)
+ mkOps(sTpt, mkWitness(sTpt, tree))
+
+ case (tpe: TypeRef, Literal(const: Constant)) ⇒
+ val sTpt = TypeTree(ConstantType(const))
+ mkOps(sTpt, mkWitness(sTpt, Literal(const)))
+
+ case (SymTpe, Apply(Select(Select(Ident(ScalaName), SymName), ApplyName), List(Literal(const: Constant)))) ⇒
+ val atatTpe = typeOf[@@[_, _]].typeConstructor
+ val sTpt = TypeTree(appliedType(atatTpe, List(SymTpe, ConstantType(const))))
+ val sVal = mkTagged(sTpt, t.tree)
+ mkOps(sTpt, mkWitness(sTpt, sVal))
+
+ case (tpe @ TypeRef(pre, sym, args), tree) ⇒
+ val sTpt = SingletonTypeTree(tree)
+ mkOps(sTpt, mkWitness(sTpt, tree))
+
+ case (tpe, tree) ⇒
+ c.abort(c.enclosingPosition, s"Expression ${t.tree} does not evaluate to a constant or a stable value")
+ }
+ }
+
+ def narrowSymbol[S <: String](t: c.Expr[scala.Symbol])(implicit sTag: c.WeakTypeTag[S]): c.Expr[scala.Symbol @@ S] = {
+ val ScalaName = newTermName("scala")
+ val SymName = newTermName("Symbol")
+ val ApplyName = newTermName("apply")
+
+ (sTag.tpe, t.tree) match {
+ case (ConstantType(Constant(s1)),
+ Apply(Select(Select(Ident(ScalaName), SymName), ApplyName), List(Literal(Constant(s2))))) if s1 == s2 ⇒
+ reify { t.splice.asInstanceOf[scala.Symbol @@ S] }
+ case _ ⇒
+ c.abort(c.enclosingPosition, s"Expression ${t.tree} is not an appropriate Symbol literal")
+ }
+ }
+
+ def constructor(prop: Boolean) =
+ DefDef(
+ Modifiers(),
+ nme.CONSTRUCTOR,
+ List(),
+ List(
+ if (prop)
+ List(
+ ValDef(Modifiers(PARAM), newTermName("i"), Ident(typeOf[Int].typeSymbol), EmptyTree))
+ else
+ Nil),
+ TypeTree(),
+ Block(
+ List(
+ Apply(
+ Select(
+ Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR),
+ if (prop)
+ List(Ident(newTermName("i")))
+ else
+ Nil)),
+ Literal(Constant(()))))
+
+ def mkImplClass(parent: Tree, defns: List[Tree], args: List[Tree]): Tree = {
+ val name = newTypeName(c.fresh())
+
+ val classDef =
+ ClassDef(
+ Modifiers(FINAL),
+ name,
+ List(),
+ Template(
+ List(parent),
+ emptyValDef,
+ constructor(args.size > 0) :: defns))
+
+ Block(
+ List(classDef),
+ Apply(Select(New(Ident(name)), nme.CONSTRUCTOR), args))
+ }
+}
+
+object SingletonTypeMacros {
+ import syntax.SingletonOps
+ type SingletonOpsLt[Lub] = SingletonOps { type T <: Lub }
+
+ def inst(c0: Context) = new SingletonTypeMacros[c0.type] { val c: c0.type = c0 }
+
+ def materializeImpl[T: c.WeakTypeTag](c: Context): c.Expr[Witness.Aux[T]] =
+ c.Expr[Witness.Aux[T]](inst(c).materializeImpl(c.weakTypeOf[T]))
+
+ def convertImpl[T](c: Context)(t: c.Expr[Any]): c.Expr[Witness.Lt[T]] = c.Expr(inst(c).convertImpl(t))
+
+ def convertInstanceImpl1[TC[_], T](c: Context)(t: c.Expr[Any])(implicit tcTag: c.WeakTypeTag[TC[_]]): c.Expr[WitnessWith.Lt[TC, T]] = c.Expr[WitnessWith.Lt[TC, T]](inst(c).convertInstanceImpl[TC](t))
+
+ def convertInstanceImpl2[H, TC2[_ <: H, _], S <: H, T](c: Context)(t: c.Expr[Any])(implicit tcTag: c.WeakTypeTag[TC2[_, _]], sTag: c.WeakTypeTag[S]): c.Expr[WitnessWith.Lt[({ type λ[X] = TC2[S, X] })#λ, T]] =
+ c.Expr[WitnessWith.Lt[({ type λ[X] = TC2[S, X] })#λ, T]](inst(c).convertInstanceImpl2[H, TC2, S](t))
+
+ def mkSingletonOps(c: Context)(t: c.Expr[Any]): c.Expr[SingletonOps] = inst(c).mkSingletonOps(t)
+
+ def narrowSymbol[S <: String](c: Context)(t: c.Expr[scala.Symbol])(implicit sTag: c.WeakTypeTag[S]): c.Expr[scala.Symbol @@ S] = inst(c).narrowSymbol[S](t)
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/sized.scala b/akka-parsing/src/main/scala/akka/shapeless/sized.scala
new file mode 100644
index 0000000000..7b3c61f636
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/sized.scala
@@ -0,0 +1,151 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import scala.collection.{ GenTraversable, GenTraversableLike }
+import scala.collection.generic.{ CanBuildFrom, IsTraversableLike }
+
+/**
+ * Wrapper for a collection type witnessing that it has the statically specified length. Can be
+ * applied to any type which can be viewed as a `GenTraversableLike`, i.e. standard collections,
+ * `Array`s, `String`s etc.
+ *
+ * @author Miles Sabin
+ */
+final class Sized[+Repr, L <: Nat](val unsized: Repr) extends AnyVal
+
+/**
+ * Carrier for `Sized` operations.
+ *
+ * These operations are implemented here as extension methods of the minimal `Sized` type to avoid issues that would
+ * otherwise be caused by its covariance.
+ *
+ * @author Miles Sabin
+ */
+class SizedOps[A, Repr, L <: Nat](r: GenTraversableLike[A, Repr]) { outer ⇒
+ import nat._
+ import ops.nat._
+ import LT._
+ import Sized.wrap
+
+ /**
+ * Returns the head of this collection. Available only if there is evidence that this collection has at least one
+ * element.
+ */
+ def head(implicit ev: _0 < L): A = r.head
+
+ /**
+ * Returns the tail of this collection. Available only if there is evidence that this collection has at least one
+ * element.
+ */
+ def tail(implicit pred: Pred[L]) = wrap[Repr, pred.Out](r.tail)
+
+ /**
+ * Returns the first ''m'' elements of this collection. An explicit type argument must be provided. Available only if
+ * there is evidence that this collection has at least ''m'' elements. The resulting collection will be statically
+ * known to have ''m'' elements.
+ */
+ def take[M <: Nat](implicit diff: Diff[L, M], ev: ToInt[M]) = wrap[Repr, M](r.take(toInt[M]))
+
+ /**
+ * Returns the first ''m'' elements of this collection. Available only if there is evidence that this collection has
+ * at least ''m'' elements. The resulting collection will be statically known to have ''m'' elements.
+ */
+ def take(m: Nat)(implicit diff: Diff[L, m.N], ev: ToInt[m.N]) = wrap[Repr, m.N](r.take(toInt[m.N]))
+
+ /**
+ * Returns all but the first ''m'' elements of this collection. An explicit type argument must be provided. Available
+ * only if there is evidence that this collection has at least ''m'' elements. The resulting collection will be
+ * statically known to have ''m'' fewer elements than this collection.
+ */
+ def drop[M <: Nat](implicit diff: Diff[L, M], ev: ToInt[M]) = wrap[Repr, diff.Out](r.drop(toInt[M]))
+
+ /**
+ * Returns all but the first ''m'' elements of this collection. Available only if there is evidence that this
+ * collection has at least ''m'' elements. The resulting collection will be statically known to have ''m'' fewer
+ * elements than this collection.
+ */
+ def drop(m: Nat)(implicit diff: Diff[L, m.N], ev: ToInt[m.N]) = wrap[Repr, diff.Out](r.drop(toInt[m.N]))
+
+ /**
+ * Splits this collection at the ''mth'' element, returning the prefix and suffix as a pair. An explicit type argument
+ * must be provided. Available only if there is evidence that this collection has at least ''m'' elements. The
+ * resulting collections will be statically known to have ''m'' and ''n-m'' elements respectively.
+ */
+ def splitAt[M <: Nat](implicit diff: Diff[L, M], ev: ToInt[M]) = (take[M], drop[M])
+
+ /**
+ * Splits this collection at the ''mth'' element, returning the prefix and suffix as a pair. Available only if there
+ * is evidence that this collection has at least ''m'' elements. The resulting collections will be statically known to
+ * have ''m'' and ''n-m'' elements respectively.
+ */
+ def splitAt(m: Nat)(implicit diff: Diff[L, m.N], ev: ToInt[m.N]) = (take[m.N], drop[m.N])
+
+ /**
+ * Prepend the argument element to this collection. The resulting collection will be statically known to have a size
+ * one greater than this collection.
+ */
+ def +:(elem: A)(implicit cbf: CanBuildFrom[Repr, A, Repr]) = {
+ val builder = cbf.apply(r.repr)
+ builder += elem
+ builder ++= r.toIterator
+ wrap[Repr, Succ[L]](builder.result)
+ }
+
+ /**
+ * Append the argument element to this collection. The resulting collection will be statically known to have a size
+ * one greater than this collection.
+ */
+ def :+(elem: A)(implicit cbf: CanBuildFrom[Repr, A, Repr]) = {
+ val builder = cbf.apply(r.repr)
+ builder ++= r.toIterator
+ builder += elem
+ wrap[Repr, Succ[L]](builder.result)
+ }
+
+ /**
+ * Append the argument collection to this collection. The resulting collection will be statically known to have
+ * ''m+n'' elements.
+ */
+ def ++[B >: A, That, M <: Nat](that: Sized[That, M])(implicit sum: Sum[L, M],
+ cbf: CanBuildFrom[Repr, B, That],
+ convThat: That ⇒ GenTraversableLike[B, That]) = wrap[That, sum.Out](r ++ that.unsized)
+
+ /**
+ * Map across this collection. The resulting collection will be statically known to have the same number of elements
+ * as this collection.
+ */
+ def map[B, That](f: A ⇒ B)(implicit cbf: CanBuildFrom[Repr, B, That]) = wrap[That, L](r map f)
+}
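+// Illustrative usage sketch (assumption; Succ and _0 come from the Nat encoding defined
+// elsewhere): a Sized collection carries its length in its type, e.g.
+//   val s = Sized.wrap[List[Int], Succ[Succ[_0]]](List(1, 2))
+// so s.head and s.tail are only available when the required length evidence exists.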
+
+trait LowPrioritySized {
+ implicit def sizedToRepr[Repr](s: Sized[Repr, _]): Repr = s.unsized
+}
+
+object Sized extends LowPrioritySized {
+ implicit def sizedOps[Repr, L <: Nat](s: Sized[Repr, L])(implicit itl: IsTraversableLike[Repr]): SizedOps[itl.A, Repr, L] =
+ new SizedOps[itl.A, Repr, L](itl.conversion(s.unsized))
+
+ def apply[CC[_]] = new SizedBuilder[CC]
+
+ def apply[CC[_]]()(implicit cbf: CanBuildFrom[Nothing, Nothing, CC[Nothing]]) =
+ new Sized[CC[Nothing], _0](cbf().result)
+
+ def wrap[Repr, L <: Nat](r: Repr) = new Sized[Repr, L](r)
+
+ def unapplySeq[Repr, L <: Nat](x: Sized[Repr, L]) = Some(x.unsized)
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/sybclass.scala b/akka-parsing/src/main/scala/akka/shapeless/sybclass.scala
new file mode 100644
index 0000000000..6625a0e8a4
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/sybclass.scala
@@ -0,0 +1,393 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import scala.language.experimental.macros
+
+import scala.reflect.macros.Context
+
+import poly._
+
+/**
+ * An implementation of [http://research.microsoft.com/en-us/um/people/simonpj/papers/hmap/
+ * "Scrap your boilerplate with class"] in Scala.
+ *
+ * @author Miles Sabin
+ */
+
+/**
+ * Type class representing one-level generic queries.
+ */
+trait Data[F, T, R] {
+ def gmapQ(t: T): List[R]
+}
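+// Illustrative sketch (assumption; Foo and `size` are hypothetical): given a Poly1 `size`
+// defined for Int and String, Data.gmapQ(size)(Foo(1, "ab")) applies `size` to each immediate
+// child of Foo and collects the results in a List (one level only, no recursion).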
+
+trait LowPriorityData {
+ /**
+ * Default Data type class instance.
+ */
+ implicit def dfltData[F, T, R]: Data[F, T, R] = new Data[F, T, R] {
+ def gmapQ(t: T): List[R] = Nil
+ }
+
+ /**
+ * Data type class instance for types with associated `Generic`s.
+ *
+ * The use of a macro here is essential to support resolution of recursive references.
+ */
+ implicit def genericData[F <: Poly, T, R, U](implicit gen: Generic.Aux[T, R]): Data[F, T, U] = macro DataMacros.genericDataImpl[F, T, R, U]
+}
+
+object Data extends LowPriorityData {
+ def gmapQ[F, T, R](f: F)(t: T)(implicit data: Data[F, T, R]) = data.gmapQ(t)
+
+ /**
+ * Data type class instance for `List`s.
+ */
+ implicit def listData[F <: Poly, T, R](implicit qt: Case1.Aux[F, T, R]): Data[F, List[T], R] = new Data[F, List[T], R] {
+ def gmapQ(t: List[T]) = t.map(qt)
+ }
+
+ /**
+ * Data type class instance for `HList`s.
+ */
+ implicit def hnilData[F <: Poly, R]: Data[F, HNil, R] =
+ new Data[F, HNil, R] {
+ def gmapQ(t: HNil) = Nil
+ }
+
+ // Use of macro here is solely to prevent spurious implicit divergence
+ implicit def hlistData[F <: Poly, H, T <: HList, R](implicit qh: Case1.Aux[F, H, R], ct: Data[F, T, R]): Data[F, H :: T, R] = macro DataMacros.hlistDataImpl[F, H, T, R]
+
+ /**
+ * Data type class instance for `Coproduct`s.
+ */
+ implicit def cnilData[F <: Poly, R]: Data[F, CNil, R] =
+ new Data[F, CNil, R] {
+ def gmapQ(t: CNil) = Nil
+ }
+
+ // Use of macro here is solely to prevent spurious implicit divergence
+ implicit def coproductData[F <: Poly, H, T <: Coproduct, R](implicit qh: Case1.Aux[F, H, R], ct: Data[F, T, R]): Data[F, H :+: T, R] = macro DataMacros.coproductDataImpl[F, H, T, R]
+}
+
+object DataMacros {
+ def genericDataImpl[F: c.WeakTypeTag, T: c.WeakTypeTag, R: c.WeakTypeTag, U: c.WeakTypeTag](c: Context)(gen: c.Expr[Generic.Aux[T, R]]): c.Expr[Data[F, T, U]] = {
+ import c.universe._
+ import Flag._
+
+ val hlistSym = c.mirror.staticClass("shapeless.HList")
+ val hlistTpe = hlistSym.asClass.toType
+
+ val coproductSym = c.mirror.staticClass("shapeless.Coproduct")
+ val coproductTpe = coproductSym.asClass.toType
+
+ val fTpe = weakTypeOf[F]
+ val tTpe = weakTypeOf[T]
+ val rTpe = weakTypeOf[R]
+ val uTpe = weakTypeOf[U]
+
+ if (tTpe <:< hlistTpe || tTpe <:< coproductTpe) {
+ c.abort(c.enclosingPosition, "HLists and Coproducts not handled here")
+ }
+
+ val dataSym = c.mirror.staticClass("shapeless.Data")
+
+ val pendingSuperCall = Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())
+
+ val thisDataTypeTree =
+ AppliedTypeTree(
+ Ident(dataSym),
+ List(TypeTree(fTpe), TypeTree(tTpe), TypeTree(uTpe)))
+
+ val reprDataTypeTree =
+ AppliedTypeTree(
+ Ident(dataSym),
+ List(TypeTree(fTpe), TypeTree(rTpe), TypeTree(uTpe)))
+
+ val recName = newTermName(c.fresh)
+ val className = newTypeName(c.fresh)
+ val genericName = newTermName(c.fresh)
+ val reprDataName = newTermName(c.fresh)
+
+ val recClass =
+ ClassDef(Modifiers(FINAL), className, List(),
+ Template(
+ List(thisDataTypeTree),
+ emptyValDef,
+ List(
+ // Implicit publication of this to tie the knot
+ ValDef(Modifiers(IMPLICIT), recName, thisDataTypeTree, This(tpnme.EMPTY)),
+
+ DefDef(
+ Modifiers(), nme.CONSTRUCTOR, List(),
+ List(List()),
+ TypeTree(),
+ Block(List(pendingSuperCall), Literal(Constant(())))),
+
+ DefDef(
+ Modifiers(), newTermName("gmapQ"), List(),
+ List(List(ValDef(Modifiers(PARAM), newTermName("t"), TypeTree(tTpe), EmptyTree))),
+ TypeTree(),
+ Block(
+ List(
+ ValDef(Modifiers(), genericName, TypeTree(), gen.tree),
+ // Resolve the Data instance for the representation here, within the
+ // scope of the implicit self-publication above, allowing successful
+ // resolution of recursive references
+ ValDef(Modifiers(), reprDataName, reprDataTypeTree,
+ TypeApply(
+ Select(Ident(definitions.PredefModule), newTermName("implicitly")),
+ List(reprDataTypeTree)))),
+ Apply(
+ Select(Ident(reprDataName), newTermName("gmapQ")),
+ List(
+ Apply(
+ Select(Ident(genericName), newTermName("to")),
+ List(
+ Ident(newTermName("t")))))))))))
+
+ val block =
+ Block(
+ List(recClass),
+ Apply(Select(New(Ident(className)), nme.CONSTRUCTOR), List()))
+
+ c.Expr[Data[F, T, U]](block)
+ }
+
+ def hlistDataImpl[F: c.WeakTypeTag, H: c.WeakTypeTag, T <: HList: c.WeakTypeTag, R: c.WeakTypeTag](c: Context)(qh: c.Expr[Case1.Aux[F, H, R]], ct: c.Expr[Data[F, T, R]]): c.Expr[Data[F, H :: T, R]] = {
+ import c.universe._
+
+ reify {
+ new Data[F, H :: T, R] {
+ val qhs = qh.splice
+ val cts = ct.splice
+ def gmapQ(t: H :: T) = qhs(t.head :: HNil) :: cts.gmapQ(t.tail)
+ }
+ }
+ }
+
+ def coproductDataImpl[F: c.WeakTypeTag, H: c.WeakTypeTag, T <: Coproduct: c.WeakTypeTag, R: c.WeakTypeTag](c: Context)(qh: c.Expr[Case1.Aux[F, H, R]], ct: c.Expr[Data[F, T, R]]): c.Expr[Data[F, H :+: T, R]] = {
+ import c.universe._
+
+ reify {
+ new Data[F, H :+: T, R] {
+ val qhs = qh.splice
+ val cts = ct.splice
+ def gmapQ(c: H :+: T) = c match {
+ case Inl(h) ⇒ List(qhs(h :: HNil))
+ case Inr(t) ⇒ cts.gmapQ(t)
+ }
+ }
+ }
+ }
+}
+
+/**
+ * Type class representing one-level generic transformations.
+ */
+trait DataT[F, T, U] {
+ def gmapT(t: T): U
+}
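+// Illustrative sketch (assumption; Foo and `inc` are hypothetical): given a Poly1 `inc`
+// defined for Int, DataT.gmapT(inc)(Foo(1, "a")) rebuilds Foo with `inc` applied to each
+// immediate child it is defined for, leaving the overall structure intact.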
+
+trait LowPriorityDataT {
+ /**
+ * Default DataT type class instance.
+ */
+ implicit def dfltDataT[F, T, U](implicit ev: T <:< U): DataT[F, T, U] = new DataT[F, T, U] {
+ def gmapT(t: T) = t
+ }
+
+ /**
+ * DataT type class instance for types with associated `Generic`s.
+ *
+ * The use of a macro here is essential to support resolution of recursive references.
+ */
+ implicit def genericDataT[F <: Poly, T, R](implicit gen: Generic.Aux[T, R]): DataT[F, T, T] = macro DataTMacros.genericDataTImpl[F, T, R]
+}
+
+object DataT extends LowPriorityDataT {
+ def gmapT[F, T, U](f: F)(t: T)(implicit data: DataT[F, T, U]) = data.gmapT(t)
+
+ /**
+ * DataT type class instance for `List`s.
+ */
+ implicit def listDataT[F <: Poly, T, U](implicit ft: Case1.Aux[F, T, U]): DataT[F, List[T], List[U]] =
+ new DataT[F, List[T], List[U]] {
+ def gmapT(t: List[T]) = t.map(ft)
+ }
+
+ /**
+ * DataT type class instance for `HList`s.
+ */
+ implicit def hnilDataT[F <: Poly]: DataT[F, HNil, HNil] =
+ new DataT[F, HNil, HNil] {
+ def gmapT(t: HNil) = HNil
+ }
+
+ // Use of macro here is solely to prevent spurious implicit divergence
+ implicit def hlistDataT[F <: Poly, H, T <: HList, U, V <: HList](implicit fh: Case1.Aux[F, H, U], ct: DataT[F, T, V]): DataT[F, H :: T, U :: V] = macro DataTMacros.hlistDataTImpl[F, H, T, U, V]
+
+ /**
+ * DataT type class instance for `Coproduct`s.
+ */
+ implicit def cnilDataT[F <: Poly]: DataT[F, CNil, CNil] =
+ new DataT[F, CNil, CNil] {
+ def gmapT(t: CNil) = sys.error("CNil is equivalent to Nothing: there should be no values of this type")
+ }
+
+ // Use of macro here is solely to prevent spurious implicit divergence
+ implicit def coproductDataT[F <: Poly, H, T <: Coproduct, U, V <: Coproduct](implicit fh: Case1.Aux[F, H, U], ct: DataT[F, T, V]): DataT[F, H :+: T, U :+: V] = macro DataTMacros.coproductDataTImpl[F, H, T, U, V]
+}
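+
+// Usage sketch (added for illustration; not part of the original shapeless sources). It assumes
+// the Poly1/`at` encoding from the vendored shapeless, i.e. exactly the Case1 instances the
+// implicits above require:
+//
+//   object tidy extends Poly1 {
+//     implicit def caseInt    = at[Int](_ + 1)
+//     implicit def caseString = at[String](_.trim)
+//   }
+//
+//   // One-level transform: each element is rewritten by its matching Case1, element types preserved
+//   // DataT.gmapT(tidy)(1 :: " foo " :: HNil)   // 2 :: "foo" :: HNil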
+
+object DataTMacros {
+ def genericDataTImpl[F: c.WeakTypeTag, T: c.WeakTypeTag, R: c.WeakTypeTag](c: Context)(gen: c.Expr[Generic.Aux[T, R]]): c.Expr[DataT[F, T, T]] = {
+ import c.universe._
+ import Flag._
+
+ val hlistSym = c.mirror.staticClass("shapeless.HList")
+ val hlistTpe = hlistSym.asClass.toType
+
+ val coproductSym = c.mirror.staticClass("shapeless.Coproduct")
+ val coproductTpe = coproductSym.asClass.toType
+
+ val fTpe = weakTypeOf[F]
+ val tTpe = weakTypeOf[T]
+ val rTpe = weakTypeOf[R]
+
+ if (tTpe <:< hlistTpe || tTpe <:< coproductTpe) {
+ c.abort(c.enclosingPosition, "HLists and Coproducts not handled here")
+ }
+
+ val dataTSym = c.mirror.staticClass("shapeless.DataT")
+
+ val pendingSuperCall = Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())
+
+ val thisDataTTypeTree =
+ AppliedTypeTree(
+ Ident(dataTSym),
+ List(TypeTree(fTpe), TypeTree(tTpe), TypeTree(tTpe)))
+
+ val reprDataTTypeTree =
+ AppliedTypeTree(
+ Ident(dataTSym),
+ List(TypeTree(fTpe), TypeTree(rTpe), TypeTree(rTpe)))
+
+ val recName = newTermName(c.fresh)
+ val className = newTypeName(c.fresh)
+ val genericName = newTermName(c.fresh)
+ val reprDataTName = newTermName(c.fresh)
+
+ val recClass =
+ ClassDef(Modifiers(FINAL), className, List(),
+ Template(
+ List(thisDataTTypeTree),
+ emptyValDef,
+ List(
+ // Implicit publication of this to tie the knot
+ ValDef(Modifiers(IMPLICIT), recName, thisDataTTypeTree, This(tpnme.EMPTY)),
+
+ DefDef(
+ Modifiers(), nme.CONSTRUCTOR, List(),
+ List(List()),
+ TypeTree(),
+ Block(List(pendingSuperCall), Literal(Constant(())))),
+
+ DefDef(
+ Modifiers(), newTermName("gmapT"), List(),
+ List(List(ValDef(Modifiers(PARAM), newTermName("t"), TypeTree(tTpe), EmptyTree))),
+ TypeTree(),
+ Block(
+ List(
+ ValDef(Modifiers(), genericName, TypeTree(), gen.tree),
+ // Resolve the DataT instance for the representation here, within the
+ // scope of the implicit self-publication above, allowing successful
+ // resolution of recursive references
+ ValDef(Modifiers(), reprDataTName, reprDataTTypeTree,
+ TypeApply(
+ Select(Ident(definitions.PredefModule), newTermName("implicitly")),
+ List(reprDataTTypeTree)))),
+ Apply(
+ Select(Ident(genericName), newTermName("from")),
+ List(
+ Apply(
+ Select(Ident(reprDataTName), newTermName("gmapT")),
+ List(
+ Apply(
+ Select(Ident(genericName), newTermName("to")),
+ List(
+ Ident(newTermName("t")))))))))))))
+
+ val block =
+ Block(
+ List(recClass),
+ Apply(Select(New(Ident(className)), nme.CONSTRUCTOR), List()))
+
+ c.Expr[DataT[F, T, T]](block)
+ }
+
+ def hlistDataTImpl[F: c.WeakTypeTag, H: c.WeakTypeTag, T <: HList: c.WeakTypeTag, U: c.WeakTypeTag, V <: HList: c.WeakTypeTag](c: Context)(fh: c.Expr[Case1.Aux[F, H, U]], ct: c.Expr[DataT[F, T, V]]): c.Expr[DataT[F, H :: T, U :: V]] = {
+ import c.universe._
+
+ reify {
+ new DataT[F, H :: T, U :: V] {
+ val fhs = fh.splice
+ val cts = ct.splice
+ def gmapT(t: H :: T): U :: V = fhs(t.head :: HNil) :: cts.gmapT(t.tail)
+ }
+ }
+ }
+
+ def coproductDataTImpl[F: c.WeakTypeTag, H: c.WeakTypeTag, T <: Coproduct: c.WeakTypeTag, U: c.WeakTypeTag, V <: Coproduct: c.WeakTypeTag](c: Context)(fh: c.Expr[Case1.Aux[F, H, U]], ct: c.Expr[DataT[F, T, V]]): c.Expr[DataT[F, H :+: T, U :+: V]] = {
+ import c.universe._
+
+ reify {
+ new DataT[F, H :+: T, U :+: V] {
+ val fhs = fh.splice
+ val cts = ct.splice
+ def gmapT(c: H :+: T) = c match {
+ case Inl(h) ⇒ Inl(fhs(h :: HNil))
+ case Inr(t) ⇒ Inr(cts.gmapT(t))
+ }
+ }
+ }
+ }
+}
+
+class EverythingAux[F, K] extends Poly
+
+trait LowPriorityEverythingAux {
+ implicit def generic[E, F <: Poly, K <: Poly, T, G, R](implicit unpack: Unpack2[E, EverythingAux, F, K], f: Case1.Aux[F, T, R], gen: Generic.Aux[T, G], data: Data[E, G, R], k: Case2.Aux[K, R, R, R]) =
+ Case1[E, T, R](t ⇒ data.gmapQ(gen.to(t)).foldLeft(f(t))(k))
+}
+
+object EverythingAux extends LowPriorityEverythingAux {
+ implicit def default[E, F <: Poly, K <: Poly, T, R](implicit unpack: Unpack2[E, EverythingAux, F, K], f: Case1.Aux[F, T, R], data: Data[E, T, R], k: Case2.Aux[K, R, R, R]) =
+ Case1[E, T, R](t ⇒ data.gmapQ(t).foldLeft(f(t))(k))
+}
+
+class EverywhereAux[F] extends Poly
+
+trait LowPriorityEverywhereAux {
+ implicit def generic[E, F <: Poly, T, G](implicit unpack: Unpack1[E, EverywhereAux, F], gen: Generic.Aux[T, G], data: DataT[E, G, G], f: Case1[F, T] = Case1[F, T, T](identity)): Case1[E, T] { type Result = f.Result } =
+ Case1[E, T, f.Result](t ⇒ f(gen.from(data.gmapT(gen.to(t)))))
+}
+
+object EverywhereAux extends LowPriorityEverywhereAux {
+ implicit def default[E, F <: Poly, T, U](implicit unpack: Unpack1[E, EverywhereAux, F], data: DataT[E, T, U], f: Case1[F, U] = Case1[F, U, U](identity)): Case1[E, T] { type Result = f.Result } =
+ Case1[E, T, f.Result](t ⇒ f(data.gmapT(t)))
+}
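+
+// How these carriers are used (illustrative comment only): `EverywhereAux[F]` backs the usual
+// shapeless `everywhere` combinator, which is defined alongside these carriers elsewhere in this
+// port. Its `generic` case rewrites a value bottom-up: convert to the generic representation,
+// transform one level down via `DataT#gmapT`, convert back, then apply `f` at this level.
+// A hypothetical sketch, assuming an `everywhere` constructor with the conventional signature:
+//
+//   object incInt extends Poly1 { implicit def caseInt = at[Int](_ + 1) }
+//   case class Point(x: Int, y: Int)
+//   // everywhere(incInt)(Point(1, 2))   // Point(2, 3) — every Int in the tree is incremented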
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/coproduct.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/coproduct.scala
new file mode 100644
index 0000000000..868206bfd3
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/coproduct.scala
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+
+/**
+ * Carrier for `Coproduct` operations.
+ *
+ * These methods are implemented here and extended onto the minimal `Coproduct` types to avoid issues that would
+ * otherwise be caused by the covariance of `:+:[H, T]`.
+ *
+ * @author Miles Sabin
+ */
+final class CoproductOps[C <: Coproduct](c: C) {
+ import ops.coproduct._
+
+ def map(f: Poly)(implicit mapper: Mapper[f.type, C]): mapper.Out = mapper(c)
+
+ def select[T](implicit selector: Selector[C, T]): Option[T] = selector(c)
+
+ def unify(implicit unifier: Unifier[C]): unifier.Out = unifier(c)
+
+ def zipWithKeys[K <: HList](keys: K)(implicit zipWithKeys: ZipWithKeys[K, C]): zipWithKeys.Out = zipWithKeys(keys, c)
+}
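+
+// Usage sketch (illustrative only; assumes the implicit conversion to CoproductOps provided by the
+// vendored shapeless syntax is in scope). Coproduct values are built with Inl/Inr:
+//
+//   type ISB = Int :+: String :+: CNil
+//   val c: ISB = Inr(Inl("foo"))
+//   // c.select[String]   // Some("foo")
+//   // c.select[Int]      // None
+//   // c.unify            // "foo", typed as the least upper bound of Int and String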
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/hlists.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/hlists.scala
new file mode 100644
index 0000000000..a1901572d9
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/hlists.scala
@@ -0,0 +1,443 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+
+import scala.annotation.tailrec
+
+/**
+ * Carrier for `HList` operations.
+ *
+ * These methods are implemented here and pimped onto the minimal `HList` types to avoid issues that would otherwise be
+ * caused by the covariance of `::[H, T]`.
+ *
+ * @author Miles Sabin
+ */
+final class HListOps[L <: HList](l: L) {
+ import ops.hlist._
+
+ /**
+ * Returns the head of this `HList`. Available only if there is evidence that this `HList` is composite.
+ */
+ def head(implicit c: IsHCons[L]): c.H = c.head(l)
+
+ /**
+ * Returns the tail of this `HList`. Available only if there is evidence that this `HList` is composite.
+ */
+ def tail(implicit c: IsHCons[L]): c.T = c.tail(l)
+
+ /**
+ * Prepend the argument element to this `HList`.
+ */
+ def ::[H](h: H): H :: L = akka.shapeless.::(h, l)
+
+ /**
+ * Prepend the argument element to this `HList`.
+ */
+ def +:[H](h: H): H :: L = akka.shapeless.::(h, l)
+
+ /**
+ * Append the argument element to this `HList`.
+ */
+ def :+[T](t: T)(implicit prepend: Prepend[L, T :: HNil]): prepend.Out = prepend(l, t :: HNil)
+
+ /**
+ * Append the argument `HList` to this `HList`.
+ */
+ def ++[S <: HList](suffix: S)(implicit prepend: Prepend[L, S]): prepend.Out = prepend(l, suffix)
+
+ /**
+ * Prepend the argument `HList` to this `HList`.
+ */
+ def ++:[P <: HList](prefix: P)(implicit prepend: Prepend[P, L]): prepend.Out = prepend(prefix, l)
+
+ /**
+ * Prepend the argument `HList` to this `HList`.
+ */
+ def :::[P <: HList](prefix: P)(implicit prepend: Prepend[P, L]): prepend.Out = prepend(prefix, l)
+
+ /**
+ * Prepend the reverse of the argument `HList` to this `HList`.
+ */
+ def reverse_:::[P <: HList](prefix: P)(implicit prepend: ReversePrepend[P, L]): prepend.Out = prepend(prefix, l)
+
+ /**
+ * Returns the ''nth'' element of this `HList`. An explicit type argument must be provided. Available only if there is
+ * evidence that this `HList` has at least ''n'' elements.
+ */
+ def apply[N <: Nat](implicit at: At[L, N]): at.Out = at(l)
+
+ /**
+ * Returns the ''nth'' element of this `HList`. Available only if there is evidence that this `HList` has at least ''n''
+ * elements.
+ */
+ def apply(n: Nat)(implicit at: At[L, n.N]): at.Out = at(l)
+
+ /**
+ * Returns the ''nth'' element of this `HList`. An explicit type argument must be provided. Available only if there is
+ * evidence that this `HList` has at least ''n'' elements.
+ */
+ def at[N <: Nat](implicit at: At[L, N]): at.Out = at(l)
+
+ /**
+ * Returns the ''nth'' element of this `HList`. Available only if there is evidence that this `HList` has at least ''n''
+ * elements.
+ */
+ def at(n: Nat)(implicit at: At[L, n.N]): at.Out = at(l)
+
+ /**
+ * Returns the last element of this `HList`. Available only if there is evidence that this `HList` is composite.
+ */
+ def last(implicit last: Last[L]): last.Out = last(l)
+
+ /**
+ * Returns an `HList` consisting of all the elements of this `HList` except the last. Available only if there is
+ * evidence that this `HList` is composite.
+ */
+ def init(implicit init: Init[L]): init.Out = init(l)
+
+ /**
+ * Returns the first element of type `U` of this `HList`. An explicit type argument must be provided. Available only
+ * if there is evidence that this `HList` has an element of type `U`.
+ */
+ def select[U](implicit selector: Selector[L, U]): U = selector(l)
+
+ /**
+ * Returns all elements of type `U` of this `HList`. An explicit type argument must be provided.
+ */
+ def filter[U](implicit filter: Filter[L, U]): filter.Out = filter(l)
+
+ /**
+ * Returns all elements of type different than `U` of this `HList`. An explicit type argument must be provided.
+ */
+ def filterNot[U](implicit filter: FilterNot[L, U]): filter.Out = filter(l)
+
+ /**
+ * Returns the first element of type `U` of this `HList` plus the remainder of the `HList`. An explicit type argument
+ * must be provided. Available only if there is evidence that this `HList` has an element of type `U`.
+ *
+ * The `Elem` suffix is here to avoid creating an ambiguity with RecordOps#remove and should be removed if
+ * SI-5414 is resolved in a way which eliminates the ambiguity.
+ */
+ def removeElem[U](implicit remove: Remove[L, U]): remove.Out = remove(l)
+
+ /**
+ * Returns the first elements of this `HList` that have types in `SL` plus the remainder of the `HList`. An explicit
+ * type argument must be provided. Available only if there is evidence that this `HList` contains elements with
+ * types in `SL`.
+ */
+ def removeAll[SL <: HList](implicit removeAll: RemoveAll[L, SL]): removeAll.Out = removeAll(l)
+
+ /**
+ * Replaces the first element of type `U` of this `HList` with the supplied value, also of type `U` returning both
+ * the replaced element and the updated `HList`. Available only if there is evidence that this `HList` has an element
+ * of type `U`.
+ */
+ def replace[U](u: U)(implicit replacer: Replacer[L, U, U]): replacer.Out = replacer(l, u)
+
+ class ReplaceTypeAux[U] {
+ def apply[V](v: V)(implicit replacer: Replacer[L, U, V]): replacer.Out = replacer(l, v)
+ }
+
+ /**
+ * Replaces the first element of type `U` of this `HList` with the supplied value of type `V`, returning both the
+ * replaced element and the updated `HList`. An explicit type argument must be provided for `U`. Available only if
+ * there is evidence that this `HList` has an element of type `U`.
+ */
+ def replaceType[U] = new ReplaceTypeAux[U]
+
+ /**
+ * Replaces the first element of type `U` of this `HList` with the supplied value, also of type `U`. Available only
+ * if there is evidence that this `HList` has an element of type `U`.
+ *
+ * The `Elem` suffix is here to avoid creating an ambiguity with RecordOps#updated and should be removed if
+ * SI-5414 is resolved in a way which eliminates the ambiguity.
+ */
+ def updatedElem[U, Out <: HList](u: U)(implicit replacer: Replacer.Aux[L, U, U, (U, Out)]): Out = replacer(l, u)._2
+
+ class UpdatedTypeAux[U] {
+ def apply[V, Out <: HList](v: V)(implicit replacer: Replacer.Aux[L, U, V, (U, Out)]): Out = replacer(l, v)._2
+ }
+
+ /**
+ * Replaces the first element of type `U` of this `HList` with the supplied value of type `V`. An explicit type
+ * argument must be provided for `U`. Available only if there is evidence that this `HList` has an element of
+ * type `U`.
+ */
+ def updatedType[U] = new UpdatedTypeAux[U]
+
+ class UpdatedAtAux[N <: Nat] {
+ def apply[U, V, Out <: HList](u: U)(implicit replacer: ReplaceAt.Aux[L, N, U, (V, Out)]): Out = replacer(l, u)._2
+ }
+
+ /**
+ * Replaces the ''nth'' element of this `HList` with the supplied value of type `U`. An explicit type argument
+ * must be provided for `N`. Available only if there is evidence that this `HList` has at least ''n'' elements.
+ */
+ def updatedAt[N <: Nat] = new UpdatedAtAux[N]
+
+ /**
+ * Replaces the ''nth'' element of this `HList` with the supplied value of type `U`. Available only if there is
+ * evidence that this `HList` has at least ''n'' elements.
+ */
+ def updatedAt[U, V, Out <: HList](n: Nat, u: U)(implicit replacer: ReplaceAt.Aux[L, n.N, U, (V, Out)]): Out = replacer(l, u)._2
+
+ /**
+ * Returns the first ''n'' elements of this `HList`. An explicit type argument must be provided. Available only if
+ * there is evidence that this `HList` has at least ''n'' elements.
+ */
+ def take[N <: Nat](implicit take: Take[L, N]): take.Out = take(l)
+
+ /**
+ * Returns the first ''n'' elements of this `HList`. Available only if there is evidence that this `HList` has at
+ * least ''n'' elements.
+ */
+ def take(n: Nat)(implicit take: Take[L, n.N]): take.Out = take(l)
+
+ /**
+ * Returns all but the first ''n'' elements of this `HList`. An explicit type argument must be provided. Available
+ * only if there is evidence that this `HList` has at least ''n'' elements.
+ */
+ def drop[N <: Nat](implicit drop: Drop[L, N]): drop.Out = drop(l)
+
+ /**
+ * Returns all but the first ''n'' elements of this `HList`. Available only if there is evidence that this `HList`
+ * has at least ''n'' elements.
+ */
+ def drop(n: Nat)(implicit drop: Drop[L, n.N]): drop.Out = drop(l)
+
+ /**
+ * Splits this `HList` at the ''nth'' element, returning the prefix and suffix as a pair. An explicit type argument
+ * must be provided. Available only if there is evidence that this `HList` has at least ''n'' elements.
+ */
+ def split[N <: Nat](implicit split: Split[L, N]): split.Out = split(l)
+
+ /**
+ * Splits this `HList` at the ''nth'' element, returning the prefix and suffix as a pair. Available only if there is
+ * evidence that this `HList` has at least ''n'' elements.
+ */
+ def split(n: Nat)(implicit split: Split[L, n.N]): split.Out = split(l)
+
+ /**
+ * Splits this `HList` at the ''nth'' element, returning the reverse of the prefix and suffix as a pair. An explicit
+ * type argument must be provided. Available only if there is evidence that this `HList` has at least ''n'' elements.
+ */
+ def reverse_split[N <: Nat](implicit split: ReverseSplit[L, N]): split.Out = split(l)
+
+ /**
+ * Splits this `HList` at the ''nth'' element, returning the reverse of the prefix and suffix as a pair. Available
+ * only if there is evidence that this `HList` has at least ''n'' elements.
+ */
+ def reverse_split(n: Nat)(implicit split: ReverseSplit[L, n.N]): split.Out = split(l)
+
+ /**
+ * Splits this `HList` at the first occurrence of an element of type `U`, returning the prefix and suffix as a pair.
+ * An explicit type argument must be provided. Available only if there is evidence that this `HList` has an element
+ * of type `U`.
+ */
+ def splitLeft[U](implicit splitLeft: SplitLeft[L, U]): splitLeft.Out = splitLeft(l)
+
+ /**
+ * Splits this `HList` at the first occurrence of an element of type `U`, returning reverse of the prefix and suffix
+ * as a pair. An explicit type argument must be provided. Available only if there is evidence that this `HList` has
+ * an element of type `U`.
+ */
+ def reverse_splitLeft[U](implicit splitLeft: ReverseSplitLeft[L, U]): splitLeft.Out = splitLeft(l)
+
+ /**
+ * Splits this `HList` at the last occurrence of an element of type `U`, returning the prefix and suffix as a pair.
+ * An explicit type argument must be provided. Available only if there is evidence that this `HList` has an element
+ * of type `U`.
+ */
+ def splitRight[U](implicit splitRight: SplitRight[L, U]): splitRight.Out = splitRight(l)
+
+ /**
+ * Splits this `HList` at the last occurrence of an element of type `U`, returning reverse of the prefix and suffix
+ * as a pair. An explicit type argument must be provided. Available only if there is evidence that this `HList` has
+ * an element of type `U`.
+ */
+ def reverse_splitRight[U](implicit splitRight: ReverseSplitRight[L, U]): splitRight.Out = splitRight(l)
+
+ /**
+ * Reverses this `HList`.
+ */
+ def reverse(implicit reverse: Reverse[L]): reverse.Out = reverse(l)
+
+ /**
+ * Maps a higher rank function across this `HList`.
+ */
+ def map(f: Poly)(implicit mapper: Mapper[f.type, L]): mapper.Out = mapper(l)
+
+ /**
+ * Flatmaps a higher rank function across this `HList`.
+ */
+ def flatMap(f: Poly)(implicit mapper: FlatMapper[f.type, L]): mapper.Out = mapper(l)
+
+ /**
+ * Replaces each element of this `HList` with a constant value.
+ */
+ def mapConst[C](c: C)(implicit mapper: ConstMapper[C, L]): mapper.Out = mapper(c, l)
+
+ /**
+ * Maps a higher rank function ''f'' across this `HList` and folds the result using monomorphic combining operator
+ * `op`. Available only if there is evidence that the result type of `f` at each element conforms to the argument
+ * type of ''op''.
+ */
+ def foldMap[R](z: R)(f: Poly)(op: (R, R) ⇒ R)(implicit folder: MapFolder[L, R, f.type]): R = folder(l, z, op)
+
+ /**
+ * Computes a left fold over this `HList` using the polymorphic binary combining operator `op`. Available only if
+ * there is evidence `op` can consume/produce all the partial results of the appropriate types.
+ */
+ def foldLeft[R](z: R)(op: Poly)(implicit folder: LeftFolder[L, R, op.type]): folder.Out = folder(l, z)
+
+ /**
+ * Computes a right fold over this `HList` using the polymorphic binary combining operator `op`. Available only if
+ * there is evidence `op` can consume/produce all the partial results of the appropriate types.
+ */
+ def foldRight[R](z: R)(op: Poly)(implicit folder: RightFolder[L, R, op.type]): folder.Out = folder(l, z)
+
+ /**
+ * Computes a left reduce over this `HList` using the polymorphic binary combining operator `op`. Available only if
+ * there is evidence that this `HList` has at least one element and that `op` can consume/produce all the partial
+ * results of the appropriate types.
+ */
+ def reduceLeft(op: Poly)(implicit reducer: LeftReducer[L, op.type]): reducer.Out = reducer(l)
+
+ /**
+ * Computes a right reduce over this `HList` using the polymorphic binary combining operator `op`. Available only if
+ * there is evidence that this `HList` has at least one element and that `op` can consume/produce all the partial
+ * results of the appropriate types.
+ */
+ def reduceRight(op: Poly)(implicit reducer: RightReducer[L, op.type]): reducer.Out = reducer(l)
+
+ /**
+ * Zips this `HList` with its argument `HList` returning an `HList` of pairs.
+ */
+ def zip[R <: HList](r: R)(implicit zipper: Zip[L :: R :: HNil]): zipper.Out = zipper(l :: r :: HNil)
+
+ /**
+ * Zips this `HList` of monomorphic function values with its argument `HList` of correspondingly typed function
+ * arguments returning the result of each application as an `HList`. Available only if there is evidence that the
+ * corresponding function and argument elements have compatible types.
+ */
+ def zipApply[A <: HList](a: A)(implicit zipper: ZipApply[L, A]): zipper.Out = zipper(l, a)
+
+ /**
+ * Zips this `HList` of `HList`s returning an `HList` of tuples. Available only if there is evidence that this
+ * `HList` has `HList` elements.
+ */
+ def zip(implicit zipper: Zip[L]): zipper.Out = zipper(l)
+
+ /**
+ * Zips this `HList` of `HList`s returning an `HList` of tuples. Available only if there is evidence that this
+ * `HList` has `HList` elements.
+ */
+ @deprecated("Use zip instead", "2.0.0")
+ def zipped(implicit zipper: Zip[L]): zipper.Out = zipper(l)
+
+ /**
+ * Unzips this `HList` of tuples returning a tuple of `HList`s. Available only if there is evidence that this
+ * `HList` has tuple elements.
+ */
+ def unzip(implicit unzipper: Unzip[L]): unzipper.Out = unzipper(l)
+
+ /**
+ * Unzips this `HList` of tuples returning a tuple of `HList`s. Available only if there is evidence that this
+ * `HList` has tuple elements.
+ */
+ @deprecated("Use unzip instead", "2.0.0")
+ def unzipped(implicit unzipper: Unzip[L]): unzipper.Out = unzipper(l)
+
+ /**
+ * Zips this `HList` with its argument `HList` of `HList`s, returning an `HList` of `HList`s with each element of
+ * this `HList` prepended to the corresponding `HList` element of the argument `HList`.
+ */
+ def zipOne[T <: HList](t: T)(implicit zipOne: ZipOne[L, T]): zipOne.Out = zipOne(l, t)
+
+ /**
+ * Zips this `HList` with a constant, resulting in an `HList` of tuples of the form
+ * ({element from this `HList`}, {supplied constant})
+ */
+ def zipConst[C](c: C)(implicit zipConst: ZipConst[C, L]): zipConst.Out = zipConst(c, l)
+
+ /**
+ * Zips this `HList` with its argument `HList` using the argument `Poly2`, returning an `HList`.
+ * Doesn't require this to be the same length as its `HList` argument, but does require evidence that its
+ * `Poly2` argument is defined at their intersection.
+ */
+ def zipWith[R <: HList, P <: Poly2](r: R)(p: P)(implicit zipWith: ZipWith[L, R, P]): zipWith.Out =
+ zipWith(l, r)
+
+ /**
+ * Transposes this `HList`.
+ */
+ def transpose(implicit transpose: Transposer[L]): transpose.Out = transpose(l)
+
+ /**
+ * Returns an `HList` typed as a repetition of the least upper bound of the types of the elements of this `HList`.
+ */
+ def unify(implicit unifier: Unifier[L]): unifier.Out = unifier(l)
+
+ /**
+ * Returns an `HList` with all elements that are subtypes of `B` typed as `B`.
+ */
+ def unifySubtypes[B](implicit subtypeUnifier: SubtypeUnifier[L, B]): subtypeUnifier.Out = subtypeUnifier(l)
+
+ /**
+ * Converts this `HList` to a correspondingly typed tuple.
+ */
+ def tupled(implicit tupler: Tupler[L]): tupler.Out = tupler(l)
+
+ /**
+ * Compute the length of this `HList`.
+ */
+ def length(implicit length: Length[L]): length.Out = length()
+
+ /**
+ * Compute the length of this `HList` as a runtime Int value.
+ */
+ def runtimeLength: Int = {
+ @tailrec def loop(l: HList, acc: Int): Int = l match {
+ case HNil ⇒ acc
+ case hd :: tl ⇒ loop(tl, acc + 1)
+ }
+
+ loop(l, 0)
+ }
+
+ /**
+ * Converts this `HList` to an ordinary `List` of elements typed as the least upper bound of the types of the elements
+ * of this `HList`.
+ */
+ def toList[Lub](implicit toList: ToList[L, Lub]): List[Lub] = toList(l)
+
+ /**
+ * Converts this `HList` to an `Array` of elements typed as the least upper bound of the types of the elements
+ * of this `HList`.
+ *
+ * It is advisable to specify the type parameter explicitly, because for many reference types, case classes in
+ * particular, the inferred type will be too precise (i.e. `Product with Serializable with CC` for a typical case class
+ * `CC`) which interacts badly with the invariance of `Array`s.
+ */
+ def toArray[Lub](implicit toArray: ToArray[L, Lub]): Array[Lub] = toArray(runtimeLength, l, 0)
+
+ /**
+ * Converts this `HList` of values into a record with the provided keys.
+ */
+ def zipWithKeys[K <: HList](keys: K)(implicit withKeys: ZipWithKeys[K, L]): withKeys.Out = withKeys(keys, l)
+}
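+
+// Usage sketch (illustrative only; assumes the implicit conversion to HListOps and the Poly1 `at`
+// encoding from the vendored shapeless are in scope):
+//
+//   val l = 23 :: "foo" :: true :: HNil
+//   // l.head              // 23
+//   // l.tail              // "foo" :: true :: HNil
+//   // l :+ 2.0            // 23 :: "foo" :: true :: 2.0 :: HNil
+//   // l.toList            // List[Any](23, "foo", true)
+//
+//   object show extends Poly1 { implicit def caseAll[T] = at[T](_.toString) }
+//   // l.map(show)         // "23" :: "foo" :: "true" :: HNil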
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/records.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/records.scala
new file mode 100644
index 0000000000..e36f60595b
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/records.scala
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2011 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+
+/**
+ * Record operations on `HList`s with field-like elements.
+ *
+ * @author Miles Sabin
+ */
+final class RecordOps[L <: HList](l: L) {
+ import record._
+ import ops.record._
+
+ /**
+ * Returns the value associated with the singleton typed key k. Only available if this record has a field
+ * with keyType equal to the singleton type k.T.
+ */
+ def get(k: Witness)(implicit selector: Selector[L, k.T]): selector.Out = selector(l)
+
+ /**
+ * Returns the value associated with the singleton typed key k. Only available if this record has a field
+ * with keyType equal to the singleton type k.T.
+ *
+ * Note that this can create a bogus ambiguity with `HListOps#apply` as described in
+ * https://issues.scala-lang.org/browse/SI-5142. If this method is accessible the conflict can be worked around by
+ * using HListOps#at instead of `HListOps#apply`.
+ */
+ def apply(k: Witness)(implicit selector: Selector[L, k.T]): selector.Out = selector(l)
+
+ /**
+ * Updates or adds to this record a field with the singleton typed key k and value of type V.
+ */
+ def updated[V](k: Witness, v: V)(implicit updater: Updater[L, FieldType[k.T, V]]): updater.Out = updater(l, field[k.T](v))
+
+ /**
+ * Updates a field having a value with type A by given function.
+ */
+ def updateWith[W](k: WitnessWith[FSL])(f: k.Out ⇒ W)(implicit modifier: Modifier[L, k.T, k.Out, W]): modifier.Out = modifier(l, f)
+ type FSL[K] = Selector[L, K]
+
+ /**
+ * Remove the field associated with the singleton typed key k, returning both the corresponding value and the updated
+ * record. Only available if this record has a field with keyType equal to the singleton type k.T.
+ */
+ def remove(k: Witness)(implicit remover: Remover[L, k.T]): remover.Out = remover(l)
+
+ /**
+ * Updates or adds to this record a field of type F.
+ */
+ def +[F](f: F)(implicit updater: Updater[L, F]): updater.Out = updater(l, f)
+
+ /**
+ * Remove the field associated with the singleton typed key k, returning the updated record. Only available if this
+ * record has a field with keyType equal to the singleton type k.T.
+ */
+ def -[V, Out <: HList](k: Witness)(implicit remover: Remover.Aux[L, k.T, (V, Out)]): Out = remover(l)._2
+
+ /**
+ * Rename the field associated with the singleton typed key oldKey. Only available if this
+ * record has a field with keyType equal to the singleton type oldKey.T.
+ */
+ def renameField(oldKey: Witness, newKey: Witness)(implicit renamer: Renamer[L, oldKey.T, newKey.T]): renamer.Out = renamer(l)
+
+ /**
+ * Returns the keys of this record as an HList of singleton typed values.
+ */
+ def keys(implicit keys: Keys[L]): keys.Out = keys()
+
+ /**
+ * Returns an HList of the values of this record.
+ */
+ def values(implicit values: Values[L]): values.Out = values(l)
+}
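+
+// Usage sketch (illustrative only; assumes the singleton/record syntax of the vendored shapeless,
+// in particular the `->>` field constructor from syntax.singleton, is in scope):
+//
+//   val book = ("title" ->> "Scala") :: ("pages" ->> 416) :: HNil
+//   // book("title")                // "Scala"
+//   // book.get("pages")            // 416
+//   // book.updated("pages", 417)   // record with the "pages" field replaced
+//   // book.keys                    // "title" :: "pages" :: HNil
+//   // book.values                  // "Scala" :: 416 :: HNil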
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/singletons.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/singletons.scala
new file mode 100644
index 0000000000..2460decf53
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/singletons.scala
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+
+import scala.language.experimental.macros
+
+object singleton {
+ implicit def mkSingletonOps(t: Any): SingletonOps = macro SingletonTypeMacros.mkSingletonOps
+
+ import tag._
+ implicit def narrowSymbol[S <: String](t: Symbol): Symbol @@ S = macro SingletonTypeMacros.narrowSymbol[S]
+}
+
+trait SingletonOps {
+ import record._
+
+ type T
+
+ /**
+ * Returns a Witness of the singleton type of this value.
+ */
+ val witness: Witness.Aux[T]
+
+ /**
+ * Narrows this value to its singleton type.
+ */
+ def narrow: T {} = witness.value
+
+ /**
+ * Returns the provided value tagged with the singleton type of this value as its key in a record-like structure.
+ */
+ def ->>[V](v: V): FieldType[T, V] = field[T](v)
+}
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/sized.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/sized.scala
new file mode 100644
index 0000000000..1f1d0b5474
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/sized.scala
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+
+import scala.collection.{ GenTraversable, GenTraversableLike }
+
+object sized {
+ implicit def genTraversableSizedConv[CC[X] <: GenTraversable[X], T](cc: CC[T])(implicit conv: CC[T] ⇒ GenTraversableLike[T, CC[T]]) = new SizedConv[T, CC[T]](cc)
+
+ implicit def stringSizedConv(s: String) = new SizedConv[Char, String](s)
+}
+
+final class SizedConv[A, Repr <% GenTraversableLike[A, Repr]](r: Repr) {
+ import ops.nat._
+ import Sized._
+
+ def sized[L <: Nat](implicit toInt: ToInt[L]) =
+ if (r.size == toInt()) Some(wrap[Repr, L](r)) else None
+
+ def sized(l: Nat)(implicit toInt: ToInt[l.N]) =
+ if (r.size == toInt()) Some(wrap[Repr, l.N](r)) else None
+
+ def ensureSized[L <: Nat](implicit toInt: ToInt[L]) = {
+ assert(r.size == toInt())
+ wrap[Repr, L](r)
+ }
+}
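+
+// Usage sketch (illustrative only; assumes the conversions above are imported via `syntax.sized._`
+// and the `_2`/`_3` Nat aliases from the vendored shapeless are available):
+//
+//   // List(1, 2, 3).sized(3)   // Some(Sized(1, 2, 3)) — size checked at runtime, recorded as Nat _3
+//   // List(1, 2, 3).sized(4)   // None
+//   // "ab".ensureSized[_2]     // Sized('a', 'b'); fails the assertion on a size mismatch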
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/std/functions.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/std/functions.scala
new file mode 100644
index 0000000000..62af221c97
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/std/functions.scala
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+package std
+
+/**
+ * Conversions between ordinary functions and `HList` functions.
+ *
+ * The implicits defined by this object enhance ordinary functions (resp. HList functions) with a `toProduct` (resp.
+ * `fromProduct`) method which creates an equivalently typed `HList` function (resp. ordinary function).
+ *
+ * @author Miles Sabin
+ */
+object function {
+ import ops.function._
+
+ implicit def fnHListOps[F](t: F)(implicit fnHLister: FnToProduct[F]) = new FnHListOps[fnHLister.Out] {
+ def toProduct = fnHLister(t)
+ }
+
+ implicit def fnUnHListOps[F](t: F)(implicit fnUnHLister: FnFromProduct[F]) = new FnUnHListOps[fnUnHLister.Out] {
+ def fromProduct = fnUnHLister(t)
+ }
+}
+
+trait FnHListOps[HLFn] {
+ def toProduct: HLFn
+}
+
+trait FnUnHListOps[F] {
+ def fromProduct: F
+}
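+
+// Usage sketch (illustrative only; assumes `syntax.std.function._` is imported so the conversions
+// above are in scope):
+//
+//   val sum = (a: Int, b: Int) ⇒ a + b
+//   // sum.toProduct                   // an (Int :: Int :: HNil) ⇒ Int
+//   // sum.toProduct(1 :: 2 :: HNil)   // 3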
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/std/products.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/std/products.scala
new file mode 100644
index 0000000000..c6c62300d9
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/std/products.scala
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+package std
+
+object product {
+ implicit def productOps[P <: Product](p: P): ProductOps[P] = new ProductOps[P](p)
+}
+
+final class ProductOps[P](p: P) {
+ import ops.product._
+
+ /**
+ * Returns an `HList` containing the elements of this tuple.
+ */
+ def productElements(implicit gen: Generic[P]): gen.Repr = gen.to(p)
+
+ /**
+ * Compute the length of this product.
+ */
+ def length(implicit length: ProductLength[P]): length.Out = length(p)
+}
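+
+// Usage sketch (illustrative only; assumes the `productOps` conversion above is imported via
+// `syntax.std.product._`):
+//
+//   case class Person(name: String, age: Int)
+//   // Person("joe", 42).productElements   // "joe" :: 42 :: HNil
+//   // Person("joe", 42).length            // the Nat 2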
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/std/traversables.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/std/traversables.scala
new file mode 100644
index 0000000000..23e9b103ee
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/std/traversables.scala
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+package std
+
+import scala.collection.GenTraversable
+
+/**
+ * Conversions between `Traversables` and `HLists`.
+ *
+ * The implicit defined by this object enhances `Traversables` with a `toHList` method which constructs an equivalently
+ * typed [[shapeless.HList]] if possible.
+ *
+ * @author Miles Sabin
+ */
+object traversable {
+ implicit def traversableOps[T <% GenTraversable[_]](t: T) = new TraversableOps(t)
+}
+
+final class TraversableOps[T <% GenTraversable[_]](t: T) {
+ import ops.traversable._
+
+ def toHList[L <: HList](implicit fl: FromTraversable[L]): Option[L] = fl(t)
+}
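+
+// Usage sketch (illustrative only; assumes the `traversableOps` conversion above is imported via
+// `syntax.std.traversable._`):
+//
+//   // List(1, 2, 3).toHList[Int :: Int :: Int :: HNil]   // Some(1 :: 2 :: 3 :: HNil)
+//   // List(1, 2).toHList[Int :: Int :: Int :: HNil]      // None — length mismatch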
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/std/tuples.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/std/tuples.scala
new file mode 100644
index 0000000000..681af14be0
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/std/tuples.scala
@@ -0,0 +1,402 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+package std
+
+trait LowPriorityTuple {
+ implicit def productTupleOps[P <: Product](p: P): TupleOps[P] = new TupleOps(p)
+}
+
+object tuple extends LowPriorityTuple {
+ implicit def unitTupleOps(u: Unit): TupleOps[Unit] = new TupleOps(u)
+
+ // Duplicated here from shapeless.HList so that explicit imports of tuple._ don't
+ // clobber the conversion to HListOps.
+ implicit def hlistOps[L <: HList](l: L): HListOps[L] = new HListOps(l)
+}
+
+final class TupleOps[T](t: T) {
+ import ops.tuple._
+
+ /**
+ * Returns an `HList` containing the elements of this tuple.
+ */
+ def productElements(implicit gen: Generic[T]): gen.Repr = gen.to(t)
+
+ /**
+ * Returns the first element of this tuple.
+ */
+ def head(implicit c: IsComposite[T]): c.H = c.head(t)
+
+ /**
+ * Returns the tail of this tuple. Available only if there is evidence that this tuple is composite.
+ */
+ def tail(implicit c: IsComposite[T]): c.T = c.tail(t)
+
+ /**
+ * Prepend the argument element to this tuple.
+ */
+ def +:[E](e: E)(implicit prepend: Prepend[Tuple1[E], T]): prepend.Out = prepend(Tuple1(e), t)
+
+ /**
+ * Append the argument element to this tuple.
+ */
+ def :+[E](e: E)(implicit prepend: Prepend[T, Tuple1[E]]): prepend.Out = prepend(t, Tuple1(e))
+
+ /**
+ * Append the argument tuple to this tuple.
+ */
+ def ++[U](u: U)(implicit prepend: Prepend[T, U]): prepend.Out = prepend(t, u)
+
+ /**
+ * Prepend the argument tuple to this tuple.
+ */
+ def ++:[U](u: U)(implicit prepend: Prepend[U, T]): prepend.Out = prepend(u, t)
+
+ /**
+ * Prepend the argument tuple to this tuple.
+ */
+ def :::[U](u: U)(implicit prepend: Prepend[U, T]): prepend.Out = prepend(u, t)
+
+ /**
+ * Prepend the reverse of the argument tuple to this tuple.
+ */
+ def reverse_:::[U](u: U)(implicit prepend: ReversePrepend[U, T]): prepend.Out = prepend(u, t)
+
+ /**
+ * Returns the ''nth'' element of this tuple. An explicit type argument must be provided. Available only if there is
+ * evidence that this tuple has at least ''n'' elements.
+ */
+ def apply[N <: Nat](implicit at: At[T, N]): at.Out = at(t)
+
+ /**
+ * Returns the ''nth'' element of this tuple. Available only if there is evidence that this tuple has at least ''n''
+ * elements.
+ */
+ def apply(n: Nat)(implicit at: At[T, n.N]): at.Out = at(t)
+
+ /**
+ * Returns the ''nth'' element of this tuple. An explicit type argument must be provided. Available only if there is
+ * evidence that this tuple has at least ''n'' elements.
+ */
+ def at[N <: Nat](implicit at: At[T, N]): at.Out = at(t)
+
+ /**
+ * Returns the ''nth'' element of this tuple. Available only if there is evidence that this tuple has at least ''n''
+ * elements.
+ */
+ def at(n: Nat)(implicit at: At[T, n.N]): at.Out = at(t)
+
+ /**
+ * Returns the last element of this tuple. Available only if there is evidence that this tuple is composite.
+ */
+ def last(implicit last: Last[T]): last.Out = last(t)
+
+ /**
+ * Returns a tuple consisting of all the elements of this tuple except the last. Available only if there is
+ * evidence that this tuple is composite.
+ */
+ def init(implicit init: Init[T]): init.Out = init(t)
+
+ /**
+ * Returns the first element of type `U` of this tuple. An explicit type argument must be provided. Available only
+ * if there is evidence that this tuple has an element of type `U`.
+ */
+ def select[U](implicit selector: Selector[T, U]): selector.Out = selector(t)
+
+ /**
+ * Returns all elements of type `U` of this tuple. An explicit type argument must be provided.
+ */
+ def filter[U](implicit filter: Filter[T, U]): filter.Out = filter(t)
+
+ /**
+ * Returns all elements of type different than `U` of this tuple. An explicit type argument must be provided.
+ */
+ def filterNot[U](implicit filterNot: FilterNot[T, U]): filterNot.Out = filterNot(t)
+
+ /**
+ * Returns the first element of type `U` of this tuple plus the remainder of the tuple. An explicit type argument
+ * must be provided. Available only if there is evidence that this tuple has an element of type `U`.
+ *
+ * The `Elem` suffix is here for consistency with the corresponding method name for `HList` and should be
+ * removed when the latter is removed.
+ */
+ def removeElem[U](implicit remove: Remove[T, U]): remove.Out = remove(t)
+
+ /**
+ * Returns the first elements of this tuple that have types in `S` plus the remainder of the tuple. An explicit
+ * type argument must be provided. Available only if there is evidence that this tuple contains elements with
+ * types in `S`.
+ */
+ def removeAll[S](implicit removeAll: RemoveAll[T, S]): removeAll.Out = removeAll(t)
+
+ /**
+ * Replaces the first element of type `U` of this tuple with the supplied value, also of type `U` returning both
+ * the replaced element and the updated tuple. Available only if there is evidence that this tuple has an element
+ * of type `U`.
+ */
+ def replace[U](u: U)(implicit replacer: Replacer[T, U, U]): replacer.Out = replacer(t, u)
+
+ class ReplaceTypeAux[U] {
+ def apply[V](v: V)(implicit replacer: Replacer[T, V, U]): replacer.Out = replacer(t, v)
+ }
+
+ /**
+ * Replaces the first element of type `U` of this tuple with the supplied value of type `V`, returning both the
+ * replaced element and the updated tuple. An explicit type argument must be provided for `U`. Available only if
+ * there is evidence that this tuple has an element of type `U`.
+ */
+ def replaceType[U] = new ReplaceTypeAux[U]
+
+ /**
+ * Replaces the first element of type `U` of this tuple with the supplied value, also of type `U`. Available only
+ * if there is evidence that this tuple has an element of type `U`.
+ *
+ * The `Elem` suffix is here for consistency with the corresponding method name for `HList` and should be
+ * removed when the latter is removed.
+ */
+ def updatedElem[U, R](u: U)(implicit replacer: Replacer.Aux[T, U, U, (U, R)]): R = replacer(t, u)._2
+
+ class UpdatedTypeAux[U] {
+ def apply[V, R](v: V)(implicit replacer: Replacer.Aux[T, V, U, (U, R)]): R = replacer(t, v)._2
+ }
+
+ /**
+ * Replaces the first element of type `U` of this tuple with the supplied value of type `V`. An explicit type
+ * argument must be provided for `U`. Available only if there is evidence that this tuple has an element of
+ * type `U`.
+ */
+ def updatedType[U] = new UpdatedTypeAux[U]
+
+ class UpdatedAtAux[N <: Nat] {
+ def apply[U, V, R](u: U)(implicit replacer: ReplaceAt.Aux[T, N, U, (V, R)]): R = replacer(t, u)._2
+ }
+
+ /**
+ * Replaces the ''nth'' element of this tuple with the supplied value of type `U`. An explicit type argument
+ * must be provided for `N`. Available only if there is evidence that this tuple has at least ''n'' elements.
+ */
+ def updatedAt[N <: Nat] = new UpdatedAtAux[N]
+
+ /**
+ * Replaces the ''nth'' element of this tuple with the supplied value of type `U`. Available only if there is
+ * evidence that this tuple has at least ''n'' elements.
+ */
+ def updatedAt[U, V, R](n: Nat, u: U)(implicit replacer: ReplaceAt.Aux[T, n.N, U, (V, R)]): R = replacer(t, u)._2
+
+ /**
+ * Returns the first ''n'' elements of this tuple. An explicit type argument must be provided. Available only if
+ * there is evidence that this tuple has at least ''n'' elements.
+ */
+ def take[N <: Nat](implicit take: Take[T, N]): take.Out = take(t)
+
+ /**
+ * Returns the first ''n'' elements of this tuple. Available only if there is evidence that this tuple has at
+ * least ''n'' elements.
+ */
+ def take(n: Nat)(implicit take: Take[T, n.N]): take.Out = take(t)
+
+ /**
+ * Returns all but the first ''n'' elements of this tuple. An explicit type argument must be provided. Available
+ * only if there is evidence that this tuple has at least ''n'' elements.
+ */
+ def drop[N <: Nat](implicit drop: Drop[T, N]): drop.Out = drop(t)
+
+ /**
+ * Returns all but the first ''n'' elements of this tuple. Available only if there is evidence that this tuple
+ * has at least ''n'' elements.
+ */
+ def drop(n: Nat)(implicit drop: Drop[T, n.N]): drop.Out = drop(t)
+
+ /**
+ * Splits this tuple at the ''nth'' element, returning the prefix and suffix as a pair. An explicit type argument
+ * must be provided. Available only if there is evidence that this tuple has at least ''n'' elements.
+ */
+ def split[N <: Nat](implicit split: Split[T, N]): split.Out = split(t)
+
+ /**
+ * Splits this tuple at the ''nth'' element, returning the prefix and suffix as a pair. Available only if there is
+ * evidence that this tuple has at least ''n'' elements.
+ */
+ def split(n: Nat)(implicit split: Split[T, n.N]): split.Out = split(t)
+
+ /**
+ * Splits this tuple at the ''nth'' element, returning the reverse of the prefix and suffix as a pair. An explicit
+ * type argument must be provided. Available only if there is evidence that this tuple has at least ''n'' elements.
+ */
+ def reverse_split[N <: Nat](implicit split: ReverseSplit[T, N]): split.Out = split(t)
+
+ /**
+ * Splits this tuple at the ''nth'' element, returning the reverse of the prefix and suffix as a pair. Available
+ * only if there is evidence that this tuple has at least ''n'' elements.
+ */
+ def reverse_split(n: Nat)(implicit split: ReverseSplit[T, n.N]): split.Out = split(t)
+
+ /**
+ * Splits this tuple at the first occurrence of an element of type `U`, returning the prefix and suffix as a pair.
+ * An explicit type argument must be provided. Available only if there is evidence that this tuple has an element
+ * of type `U`.
+ */
+ def splitLeft[U](implicit splitLeft: SplitLeft[T, U]): splitLeft.Out = splitLeft(t)
+
+ /**
+ * Splits this tuple at the first occurrence of an element of type `U`, returning reverse of the prefix and suffix
+ * as a pair. An explicit type argument must be provided. Available only if there is evidence that this tuple has
+ * an element of type `U`.
+ */
+ def reverse_splitLeft[U](implicit splitLeft: ReverseSplitLeft[T, U]): splitLeft.Out = splitLeft(t)
+
+ /**
+ * Splits this tuple at the last occurrence of an element of type `U`, returning the prefix and suffix as a pair.
+ * An explicit type argument must be provided. Available only if there is evidence that this tuple has an element
+ * of type `U`.
+ */
+ def splitRight[U](implicit splitRight: SplitRight[T, U]): splitRight.Out = splitRight(t)
+
+ /**
+ * Splits this tuple at the last occurrence of an element of type `U`, returning reverse of the prefix and suffix
+ * as a pair. An explicit type argument must be provided. Available only if there is evidence that this tuple has
+ * an element of type `U`.
+ */
+ def reverse_splitRight[U](implicit splitRight: ReverseSplitRight[T, U]): splitRight.Out = splitRight(t)
+
+ /**
+ * Reverses this tuple.
+ */
+ def reverse(implicit reverse: Reverse[T]): reverse.Out = reverse(t)
+
+ /**
+ * Maps a higher rank function across this tuple.
+ */
+ def map(f: Poly)(implicit mapper: Mapper[T, f.type]): mapper.Out = mapper(t)
+
+ /**
+ * Flatmaps a higher rank function across this tuple.
+ */
+ def flatMap(f: Poly)(implicit mapper: FlatMapper[T, f.type]): mapper.Out = mapper(t)
+
+ /**
+ * Replaces each element of this tuple with a constant value.
+ */
+ def mapConst[C](c: C)(implicit mapper: ConstMapper[T, C]): mapper.Out = mapper(t, c)
+
+ /**
+ * Maps a higher rank function ''f'' across this tuple and folds the result using monomorphic combining operator
+ * `op`. Available only if there is evidence that the result type of `f` at each element conforms to the argument
+ * type of ''op''.
+ */
+ def foldMap[R](z: R)(f: Poly)(op: (R, R) ⇒ R)(implicit folder: MapFolder[T, R, f.type]): R = folder(t, z, op)
+
+ /**
+ * Computes a left fold over this tuple using the polymorphic binary combining operator `op`. Available only if
+ * there is evidence `op` can consume/produce all the partial results of the appropriate types.
+ */
+ def foldLeft[R](z: R)(op: Poly)(implicit folder: LeftFolder[T, R, op.type]): folder.Out = folder(t, z)
+
+ /**
+ * Computes a right fold over this tuple using the polymorphic binary combining operator `op`. Available only if
+ * there is evidence `op` can consume/produce all the partial results of the appropriate types.
+ */
+ def foldRight[R](z: R)(op: Poly)(implicit folder: RightFolder[T, R, op.type]): folder.Out = folder(t, z)
+
+ /**
+ * Computes a left reduce over this tuple using the polymorphic binary combining operator `op`. Available only if
+ * there is evidence that this tuple has at least one element and that `op` can consume/produce all the partial
+ * results of the appropriate types.
+ */
+ def reduceLeft(op: Poly)(implicit reducer: LeftReducer[T, op.type]): reducer.Out = reducer(t)
+
+ /**
+ * Computes a right reduce over this tuple using the polymorphic binary combining operator `op`. Available only if
+ * there is evidence that this tuple has at least one element and that `op` can consume/produce all the partial
+ * results of the appropriate types.
+ */
+ def reduceRight(op: Poly)(implicit reducer: RightReducer[T, op.type]): reducer.Out = reducer(t)
+
+ /**
+ * Zips this tuple with its argument tuple returning a tuple of pairs.
+ */
+ def zip[R](r: R)(implicit transpose: Transposer[(T, R)]): transpose.Out = transpose((t, r))
+
+ /**
+ * Zips this tuple of monomorphic function values with its argument tuple of correspondingly typed function
+ * arguments returning the result of each application as a tuple. Available only if there is evidence that the
+ * corresponding function and argument elements have compatible types.
+ */
+ def zipApply[A](a: A)(implicit zipper: ZipApply[T, A]): zipper.Out = zipper(t, a)
+
+ /**
+ * Zips this tuple of tuples returning a tuple of tuples. Available only if there is evidence that this
+ * tuple has tuple elements.
+ */
+ def zip(implicit transpose: Transposer[T]): transpose.Out = transpose(t)
+
+ /**
+ * Unzips this tuple of tuples returning a tuple of tuples. Available only if there is evidence that this
+ * tuple has tuple elements.
+ */
+ def unzip(implicit transpose: Transposer[T]): transpose.Out = transpose(t)
+
+ /**
+ * Zips this tuple with its argument tuple of tuples, returning a tuple of tuples with each element of
+ * this tuple prepended to the corresponding tuple element of the argument tuple.
+ */
+ def zipOne[R](r: R)(implicit zipOne: ZipOne[T, R]): zipOne.Out = zipOne(t, r)
+
+ /**
+ * Zips this tuple with a constant, resulting in a tuple of tuples, with each element being of the form
+ * ({element from original tuple}, {supplied constant})
+ */
+ def zipConst[C](c: C)(implicit zipper: ZipConst[T, C]): zipper.Out = zipper(t, c)
+
+ /**
+ * Transposes this tuple.
+ */
+ def transpose(implicit transpose: Transposer[T]): transpose.Out = transpose(t)
+
+ /**
+ * Returns a tuple typed as a repetition of the least upper bound of the types of the elements of this tuple.
+ */
+ def unify(implicit unifier: Unifier[T]): unifier.Out = unifier(t)
+
+ /**
+ * Returns a tuple with all elements that are subtypes of `B` typed as `B`.
+ */
+ def unifySubtypes[B](implicit subtypeUnifier: SubtypeUnifier[T, B]): subtypeUnifier.Out = subtypeUnifier(t)
+
+ /**
+ * Compute the length of this tuple.
+ */
+ def length(implicit length: Length[T]): length.Out = length(t)
+
+ /**
+ * Converts this tuple to a `List` of elements typed as the least upper bound of the types of the elements
+ * of this tuple.
+ */
+ def toList[Lub](implicit toList: ToList[T, Lub]): toList.Out = toList(t)
+
+ /**
+ * Converts this tuple to an `Array` of elements typed as the least upper bound of the types of the elements
+ * of this tuple.
+ *
+ * It is advisable to specify the type parameter explicitly, because for many reference types, case classes in
+ * particular, the inferred type will be too precise (i.e. `Product with Serializable with CC` for a typical case class
+ * `CC`) which interacts badly with the invariance of `Array`s.
+ */
+ def toArray[Lub](implicit toArray: ToArray[T, Lub]): toArray.Out = toArray(t)
+}
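+
+// Usage sketch (illustrative only; assumes `syntax.std.tuple._` is imported so the TupleOps
+// conversion above is in scope):
+//
+//   val t = (23, "foo", true)
+//   // t.head       // 23
+//   // t.tail       // ("foo", true)
+//   // t :+ 2.0     // (23, "foo", true, 2.0)
+//   // t.toList     // List[Any](23, "foo", true)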
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/typeable.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/typeable.scala
new file mode 100644
index 0000000000..df422f3d3f
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/typeable.scala
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2011-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+
+object typeable {
+ implicit def typeableOps(t: Any): TypeableOps = new TypeableOps(t)
+}
+
+final class TypeableOps(t: Any) {
+ /**
+ * Cast the receiver to a value of type `U` if possible. This operation will be as precise wrt erasure as possible
+ * given the in-scope `Typeable` instances available.
+ */
+ def cast[U](implicit castU: Typeable[U]) = castU.cast(t)
+}
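+
+// Usage sketch (illustrative only; assumes `syntax.typeable._` is imported and a `Typeable[Int]`
+// instance is available, as in stock shapeless):
+//
+//   val any: Any = 23
+//   // any.cast[Int]      // Some(23)
+//   // any.cast[String]   // None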
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/unions.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/unions.scala
new file mode 100644
index 0000000000..fbd12bb5cb
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/unions.scala
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2011 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+
+/**
+ * Discriminated union operations on `Coproduct`s with field-like elements.
+ *
+ * @author Miles Sabin
+ */
+final class UnionOps[C <: Coproduct](c: C) {
+ import union._
+ import ops.union._
+
+ /**
+ * Returns the value associated with the singleton typed key k. Only available if this union has a field with
+ * keyType equal to the singleton type k.T.
+ */
+ def get(k: Witness)(implicit selector: Selector[C, k.T]): selector.Out = selector(c)
+
+ /**
+ * Returns the value associated with the singleton typed key k. Only available if this union has a field with
+ * keyType equal to the singleton type k.T.
+ *
+ * Note that this can create a bogus ambiguity with `CoproductOps#apply` as described in
+ * https://issues.scala-lang.org/browse/SI-5142. If this method is accessible, the conflict can be worked around by
+ * using `CoproductOps#at` instead of `CoproductOps#apply`.
+ */
+ def apply(k: Witness)(implicit selector: Selector[C, k.T]): selector.Out = selector(c)
+}
+
diff --git a/akka-parsing/src/main/scala/akka/shapeless/syntax/zipper.scala b/akka-parsing/src/main/scala/akka/shapeless/syntax/zipper.scala
new file mode 100644
index 0000000000..baf4bc25a9
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/syntax/zipper.scala
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2012-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+package syntax
+
+object zipper {
+ implicit def toZipper[C](c: C) = new ZipperOps(c)
+}
+
+/** Enhances values of any type with a representation via `Generic` with a method supporting conversion to a `Zipper`. */
+class ZipperOps[C](c: C) {
+ def toZipper[CL <: HList](implicit gen: Generic.Aux[C, CL]) = Zipper(c)
+}
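+
+// Illustrative usage sketch (not part of the original sources); `Address` is a hypothetical case class.
+// Any type with a `Generic` representation can be converted to a `Zipper` and edited positionally.
+//
+//   import akka.shapeless._, syntax.zipper._
+//
+//   case class Address(street: String, city: String)
+//   val z = Address("Main St", "Springfield").toZipper
+//   z.get                              // "Main St"
+//   z.right.get                        // "Springfield"
+//   z.right.put("Shelbyville").reify   // Address("Main St", "Shelbyville")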
diff --git a/akka-parsing/src/main/scala/akka/shapeless/test/package.scala b/akka-parsing/src/main/scala/akka/shapeless/test/package.scala
new file mode 100644
index 0000000000..602182bb3e
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/test/package.scala
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2014 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import scala.reflect.runtime.universe._
+
+package object test {
+ def typed[T](t: ⇒ T) {}
+
+ def sameTyped[T](t1: ⇒ T)(t2: ⇒ T) {}
+
+ def showType[T: TypeTag]: String = typeOf[T].normalize.toString
+
+ def showType[T: TypeTag](t: ⇒ T): String = typeOf[T].normalize.toString
+}
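+
+// Illustrative usage sketch (not part of the original sources): `typed` asserts the statically inferred type
+// of an expression at compile time, `showType` renders a type as a `String`.
+//
+//   import akka.shapeless.test._
+//
+//   typed[Int](1 + 1)         // compiles: the expression is typed as Int
+//   // typed[String](1 + 1)   // would not compile
+//   showType[List[Int]]       // "List[Int]" (exact rendering may vary)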
diff --git a/akka-parsing/src/main/scala/akka/shapeless/test/typechecking.scala b/akka-parsing/src/main/scala/akka/shapeless/test/typechecking.scala
new file mode 100644
index 0000000000..9e5a43ff1a
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/test/typechecking.scala
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2013 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless.test
+
+import scala.language.experimental.macros
+
+import java.util.regex.Pattern
+
+import scala.reflect.macros.{ Context, TypecheckException }
+
+/**
+ * A utility which ensures that a code fragment does not typecheck.
+ *
+ * Credit: Stefan Zeiger (@StefanZeiger)
+ */
+object illTyped {
+ def apply(code: String): Unit = macro applyImplNoExp
+ def apply(code: String, expected: String): Unit = macro applyImpl
+
+ def applyImplNoExp(c: Context)(code: c.Expr[String]) = applyImpl(c)(code, null)
+
+ def applyImpl(c: Context)(code: c.Expr[String], expected: c.Expr[String]): c.Expr[Unit] = {
+ import c.universe._
+
+ val Expr(Literal(Constant(codeStr: String))) = code
+ val (expPat, expMsg) = expected match {
+ case null ⇒ (null, "Expected some error.")
+ case Expr(Literal(Constant(s: String))) ⇒
+ (Pattern.compile(s, Pattern.CASE_INSENSITIVE), "Expected error matching: " + s)
+ }
+
+ try {
+ c.typeCheck(c.parse("{ " + codeStr + " }"))
+ c.abort(c.enclosingPosition, "Type-checking succeeded unexpectedly.\n" + expMsg)
+ } catch {
+ case e: TypecheckException ⇒
+ val msg = e.getMessage
+ if ((expected ne null) && !(expPat.matcher(msg)).matches)
+ c.abort(c.enclosingPosition, "Type-checking failed in an unexpected way.\n" + expMsg + "\nActual error: " + msg)
+ }
+
+ reify(())
+ }
+}
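+
+// Illustrative usage sketch (not part of the original sources): `illTyped` turns "this code must not
+// compile" into a compile-time assertion; the optional second argument is a case-insensitive pattern the
+// type error message must match.
+//
+//   import akka.shapeless.test.illTyped
+//
+//   illTyped("""val x: String = 1""")   // fine: the quoted snippet does not typecheck
+//   // illTyped("""val x: Int = 1""")   // would abort compilation: the snippet typechecks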
diff --git a/akka-parsing/src/main/scala/akka/shapeless/typeable.scala b/akka-parsing/src/main/scala/akka/shapeless/typeable.scala
new file mode 100644
index 0000000000..2789c7e371
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/typeable.scala
@@ -0,0 +1,248 @@
+/*
+ * Copyright (c) 2011-14 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+/**
+ * Type class supporting type safe cast.
+ *
+ * @author Miles Sabin
+ */
+trait Typeable[U] {
+ def cast(t: Any): Option[U]
+}
+
+trait LowPriorityTypeable {
+ import scala.reflect.ClassTag
+
+ /**
+ * Default `Typeable` instance. Note that this is safe only up to erasure.
+ */
+ implicit def dfltTypeable[U](implicit mU: ClassTag[U]): Typeable[U] =
+ new Typeable[U] {
+ def cast(t: Any): Option[U] = {
+ if (t == null || (mU.runtimeClass isAssignableFrom t.getClass)) Some(t.asInstanceOf[U]) else None
+ }
+ }
+}
+
+/**
+ * Provides instances of `Typeable`. Also provides an implicit conversion which enhances arbitrary values with a
+ * `cast[U]` method.
+ */
+object Typeable extends TupleTypeableInstances with LowPriorityTypeable {
+ import java.{ lang ⇒ jl }
+ import scala.collection.{ GenMap, GenTraversable }
+ import scala.reflect.ClassTag
+ import syntax.typeable._
+
+ case class ValueTypeable[U, B](cB: Class[B]) extends Typeable[U] {
+ def cast(t: Any): Option[U] = {
+ if (t == null || (cB isAssignableFrom t.getClass)) Some(t.asInstanceOf[U]) else None
+ }
+ }
+
+ /** Typeable instance for `Byte`. */
+ implicit val byteTypeable: Typeable[Byte] = ValueTypeable[Byte, jl.Byte](classOf[jl.Byte])
+ /** Typeable instance for `Short`. */
+ implicit val shortTypeable: Typeable[Short] = ValueTypeable[Short, jl.Short](classOf[jl.Short])
+ /** Typeable instance for `Char`. */
+ implicit val charTypeable: Typeable[Char] = ValueTypeable[Char, jl.Character](classOf[jl.Character])
+ /** Typeable instance for `Int`. */
+ implicit val intTypeable: Typeable[Int] = ValueTypeable[Int, jl.Integer](classOf[jl.Integer])
+ /** Typeable instance for `Long`. */
+ implicit val longTypeable: Typeable[Long] = ValueTypeable[Long, jl.Long](classOf[jl.Long])
+ /** Typeable instance for `Float`. */
+ implicit val floatTypeable: Typeable[Float] = ValueTypeable[Float, jl.Float](classOf[jl.Float])
+ /** Typeable instance for `Double`. */
+ implicit val doubleTypeable: Typeable[Double] = ValueTypeable[Double, jl.Double](classOf[jl.Double])
+ /** Typeable instance for `Boolean`. */
+ implicit val booleanTypeable: Typeable[Boolean] = ValueTypeable[Boolean, jl.Boolean](classOf[jl.Boolean])
+ /** Typeable instance for `Unit`. */
+ implicit val unitTypeable: Typeable[Unit] = ValueTypeable[Unit, runtime.BoxedUnit](classOf[runtime.BoxedUnit])
+
+ def isValClass[T](clazz: Class[T]) =
+ (classOf[jl.Number] isAssignableFrom clazz) ||
+ clazz == classOf[jl.Boolean] ||
+ clazz == classOf[jl.Character] ||
+ clazz == classOf[runtime.BoxedUnit]
+
+ /** Typeable instance for `AnyVal`. */
+ implicit val anyValTypeable: Typeable[AnyVal] =
+ new Typeable[AnyVal] {
+ def cast(t: Any): Option[AnyVal] = {
+ if (t == null || isValClass(t.getClass)) Some(t.asInstanceOf[AnyVal]) else None
+ }
+ }
+
+ /** Typeable instance for `AnyRef`. */
+ implicit val anyRefTypeable: Typeable[AnyRef] =
+ new Typeable[AnyRef] {
+ def cast(t: Any): Option[AnyRef] = {
+ if (t != null && isValClass(t.getClass)) None else Some(t.asInstanceOf[AnyRef])
+ }
+ }
+
+ /** Typeable instance for `Option`. */
+ implicit def optionTypeable[T](implicit castT: Typeable[T]): Typeable[Option[T]] =
+ new Typeable[Option[T]] {
+ def cast(t: Any): Option[Option[T]] = {
+ if (t == null) Some(t.asInstanceOf[Option[T]])
+ else if (t.isInstanceOf[Option[_]]) {
+ val o = t.asInstanceOf[Option[_]]
+ if (o.isEmpty) Some(t.asInstanceOf[Option[T]])
+ else for (e ← o; _ ← e.cast[T]) yield t.asInstanceOf[Option[T]]
+ } else None
+ }
+ }
+
+ /** Typeable instance for `Either`. */
+ implicit def eitherTypeable[A, B](implicit castA: Typeable[Left[A, B]], castB: Typeable[Right[A, B]]): Typeable[Either[A, B]] =
+ new Typeable[Either[A, B]] {
+ def cast(t: Any): Option[Either[A, B]] = {
+ t.cast[Left[A, B]] orElse t.cast[Right[A, B]]
+ }
+ }
+
+ /** Typeable instance for `Left`. */
+ implicit def leftTypeable[A, B](implicit castA: Typeable[A]): Typeable[Left[A, B]] =
+ new Typeable[Left[A, B]] {
+ def cast(t: Any): Option[Left[A, B]] = {
+ if (t == null) Some(t.asInstanceOf[Left[A, B]])
+ else if (t.isInstanceOf[Left[_, _]]) {
+ val l = t.asInstanceOf[Left[_, _]]
+ for (a ← l.a.cast[A]) yield t.asInstanceOf[Left[A, B]]
+ } else None
+ }
+ }
+
+ /** Typeable instance for `Right`. */
+ implicit def rightTypeable[A, B](implicit castB: Typeable[B]): Typeable[Right[A, B]] =
+ new Typeable[Right[A, B]] {
+ def cast(t: Any): Option[Right[A, B]] = {
+ if (t == null) Some(t.asInstanceOf[Right[A, B]])
+ else if (t.isInstanceOf[Right[_, _]]) {
+ val r = t.asInstanceOf[Right[_, _]]
+ for (b ← r.b.cast[B]) yield t.asInstanceOf[Right[A, B]]
+ } else None
+ }
+ }
+
+ /**
+ * Typeable instance for `GenTraversable`.
+ * Note that the contents will be tested for conformance to the element type.
+ */
+ implicit def genTraversableTypeable[CC[X] <: GenTraversable[X], T](implicit mCC: ClassTag[CC[_]], castT: Typeable[T]): Typeable[CC[T]] =
+ new Typeable[CC[T]] {
+ def cast(t: Any): Option[CC[T]] =
+ if (t == null) Some(t.asInstanceOf[CC[T]])
+ else if (mCC.runtimeClass isAssignableFrom t.getClass) {
+ val cc = t.asInstanceOf[CC[Any]]
+ if (cc.forall(_.cast[T].isDefined)) Some(t.asInstanceOf[CC[T]])
+ else None
+ } else None
+ }
+
+ /** Typeable instance for `Map`. Note that the contents will be tested for conformance to the key/value types. */
+ implicit def genMapTypeable[M[X, Y], T, U](implicit ev: M[T, U] <:< GenMap[T, U], mM: ClassTag[M[_, _]], castTU: Typeable[(T, U)]): Typeable[M[T, U]] =
+ new Typeable[M[T, U]] {
+ def cast(t: Any): Option[M[T, U]] =
+ if (t == null) Some(t.asInstanceOf[M[T, U]])
+ else if (mM.runtimeClass isAssignableFrom t.getClass) {
+ val m = t.asInstanceOf[GenMap[Any, Any]]
+ if (m.forall(_.cast[(T, U)].isDefined)) Some(t.asInstanceOf[M[T, U]])
+ else None
+ } else None
+ }
+
+ /** Typeable instance for `HNil`. */
+ implicit val hnilTypeable: Typeable[HNil] =
+ new Typeable[HNil] {
+ def cast(t: Any): Option[HNil] = if (t == null || t.isInstanceOf[HNil]) Some(t.asInstanceOf[HNil]) else None
+ }
+
+ /** Typeable instance for `HList`s. Note that the contents will be tested for conformance to the element types. */
+ implicit def hlistTypeable[H, T <: HList](implicit castH: Typeable[H], castT: Typeable[T]): Typeable[H :: T] =
+ new Typeable[H :: T] {
+ def cast(t: Any): Option[H :: T] = {
+ if (t == null) Some(t.asInstanceOf[H :: T])
+ else if (t.isInstanceOf[::[_, _ <: HList]]) {
+ val l = t.asInstanceOf[::[_, _ <: HList]]
+ for (hd ← l.head.cast[H]; tl ← (l.tail: Any).cast[T]) yield t.asInstanceOf[H :: T]
+ } else None
+ }
+ }
+
+ /** Typeable instance for `CNil`. */
+ implicit val cnilTypeable: Typeable[CNil] =
+ new Typeable[CNil] {
+ def cast(t: Any): Option[CNil] = None
+ }
+
+ /**
+ * Typeable instance for `Coproduct`s.
+ * Note that the contents will be tested for conformance to one of the element types.
+ */
+ implicit def coproductTypeable[H, T <: Coproduct](implicit castH: Typeable[H], castT: Typeable[T]): Typeable[H :+: T] =
+ new Typeable[H :+: T] {
+ def cast(t: Any): Option[H :+: T] = {
+ t.cast[Inl[H, T]] orElse t.cast[Inr[H, T]]
+ }
+ }
+
+ /** Typeable instance for `Inl`. */
+ implicit def inlTypeable[H, T <: Coproduct](implicit castH: Typeable[H]): Typeable[Inl[H, T]] =
+ new Typeable[Inl[H, T]] {
+ def cast(t: Any): Option[Inl[H, T]] = {
+ if (t == null) Some(t.asInstanceOf[Inl[H, T]])
+ else if (t.isInstanceOf[Inl[_, _ <: Coproduct]]) {
+ val l = t.asInstanceOf[Inl[_, _ <: Coproduct]]
+ for (hd ← l.head.cast[H]) yield t.asInstanceOf[Inl[H, T]]
+ } else None
+ }
+ }
+
+ /** Typeable instance for `Inr`. */
+ implicit def inrTypeable[H, T <: Coproduct](implicit castT: Typeable[T]): Typeable[Inr[H, T]] =
+ new Typeable[Inr[H, T]] {
+ def cast(t: Any): Option[Inr[H, T]] = {
+ if (t == null) Some(t.asInstanceOf[Inr[H, T]])
+ else if (t.isInstanceOf[Inr[_, _ <: Coproduct]]) {
+ val r = t.asInstanceOf[Inr[_, _ <: Coproduct]]
+ for (tl ← r.tail.cast[T]) yield t.asInstanceOf[Inr[H, T]]
+ } else None
+ }
+ }
+}
+
+/**
+ * Extractor for use of `Typeable` in pattern matching.
+ *
+ * Thanks to Stacy Curl for the idea.
+ *
+ * @author Miles Sabin
+ */
+trait TypeCase[T] {
+ def unapply(t: Any): Option[T]
+}
+
+object TypeCase {
+ import syntax.typeable._
+ def apply[T: Typeable]: TypeCase[T] = new TypeCase[T] {
+ def unapply(t: Any): Option[T] = t.cast[T]
+ }
+}
+
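+// Illustrative usage sketch (not part of the original sources): `TypeCase` lets `Typeable` drive pattern
+// matching, recovering element types that a plain type pattern would lose to erasure.
+//
+//   import akka.shapeless._
+//
+//   val ListOfInt = TypeCase[List[Int]]
+//   def sumIfInts(a: Any): Int = a match {
+//     case ListOfInt(xs) ⇒ xs.sum
+//     case _             ⇒ 0
+//   }
+//   sumIfInts(List(1, 2, 3))    // 6
+//   sumIfInts(List("a", "b"))   // 0 -- contents are checked, unlike an erased type pattern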
diff --git a/akka-parsing/src/main/scala/akka/shapeless/typeclass.scala b/akka-parsing/src/main/scala/akka/shapeless/typeclass.scala
new file mode 100644
index 0000000000..dd5909a361
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/typeclass.scala
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2013-14 Lars Hupel, Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import scala.language.experimental.macros
+
+import scala.reflect.macros.Context
+
+/**
+ * A type class abstracting over the `product` operation of type classes over
+ * types of kind `*`, as well as deriving instances using an isomorphism.
+ */
+trait ProductTypeClass[C[_]] {
+ /**
+ * Given a type class instance for `H`, and a type class instance for a
+ * product, produce a type class instance for the product prepended with `H`.
+ */
+ def product[H, T <: HList](CHead: C[H], CTail: C[T]): C[H :: T]
+
+ /**
+ * The empty product.
+ */
+ def emptyProduct: C[HNil]
+
+ /**
+ * Given an isomorphism between `F` and `G`, and a type class instance for `G`,
+ * produce a type class instance for `F`.
+ */
+ def project[F, G](instance: ⇒ C[G], to: F ⇒ G, from: G ⇒ F): C[F]
+}
+
+trait ProductTypeClassCompanion[C[_]] {
+ object auto {
+ implicit def derive[T](implicit ev: ProductTypeClass[C]): C[T] = macro GenericMacros.deriveProductInstance[C, T]
+ }
+
+ def apply[T](implicit ev: ProductTypeClass[C]): C[T] = macro GenericMacros.deriveProductInstance[C, T]
+}
+
+/**
+ * A type class abstracting over the `product` operation of type classes over
+ * types of kind `*`, as well as deriving instances using an isomorphism.
+ * Refines ProductTypeClass with the addition of runtime `String` labels
+ * corresponding to the names of the product elements.
+ */
+trait LabelledProductTypeClass[C[_]] {
+ /**
+ * Given a type class instance for `H`, and a type class instance for a
+ * product, produce a type class instance for the product prepended with `H`.
+ */
+ def product[H, T <: HList](name: String, CHead: C[H], CTail: C[T]): C[H :: T]
+
+ /**
+ * The empty product.
+ */
+ def emptyProduct: C[HNil]
+
+ /**
+ * Given an isomorphism between `F` and `G`, and a type class instance for `G`,
+ * produce a type class instance for `F`.
+ */
+ def project[F, G](instance: ⇒ C[G], to: F ⇒ G, from: G ⇒ F): C[F]
+}
+
+trait LabelledProductTypeClassCompanion[C[_]] {
+ object auto {
+ implicit def derive[T](implicit ev: LabelledProductTypeClass[C]): C[T] = macro GenericMacros.deriveLabelledProductInstance[C, T]
+ }
+
+ def apply[T](implicit ev: LabelledProductTypeClass[C]): C[T] = macro GenericMacros.deriveLabelledProductInstance[C, T]
+}
+
+/**
+ * A type class additionally abstracting over the `coproduct` operation of type
+ * classes over types of kind `*`.
+ */
+trait TypeClass[C[_]] extends ProductTypeClass[C] {
+ /**
+ * Given two type class instances for `L` and `R`, produce a type class
+ * instance for the coproduct `L :+: R`.
+ */
+ def coproduct[L, R <: Coproduct](CL: ⇒ C[L], CR: ⇒ C[R]): C[L :+: R]
+
+ /**
+ * The empty coproduct
+ */
+ def emptyCoproduct: C[CNil]
+}
+
+trait TypeClassCompanion[C[_]] {
+ object auto {
+ implicit def derive[T](implicit ev: TypeClass[C]): C[T] = macro GenericMacros.deriveInstance[C, T]
+ }
+
+ def apply[T](implicit ev: TypeClass[C]): C[T] = macro GenericMacros.deriveInstance[C, T]
+}
+
+/**
+ * A type class additionally abstracting over the `coproduct` operation of type
+ * classes over types of kind `*`.
+ *
+ * Name hints can be safely ignored.
+ */
+trait LabelledTypeClass[C[_]] extends LabelledProductTypeClass[C] {
+ /**
+ * Given two type class instances for `L` and `R`, produce a type class
+ * instance for the coproduct `L :+: R`.
+ */
+ def coproduct[L, R <: Coproduct](name: String, CL: ⇒ C[L], CR: ⇒ C[R]): C[L :+: R]
+
+ /**
+ * The empty coproduct
+ */
+ def emptyCoproduct: C[CNil]
+}
+
+trait LabelledTypeClassCompanion[C[_]] {
+ object auto {
+ implicit def derive[T](implicit ev: LabelledTypeClass[C]): C[T] = macro GenericMacros.deriveLabelledInstance[C, T]
+ }
+
+ def apply[T](implicit ev: LabelledTypeClass[C]): C[T] = macro GenericMacros.deriveLabelledInstance[C, T]
+}
+
+final class DeriveConstructors
+
+object TypeClass {
+ implicit val deriveConstructors: DeriveConstructors = new DeriveConstructors()
+}
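+
+// Illustrative derivation sketch (not part of the original sources): a minimal `Show` type class whose
+// case class instances are derived through `ProductTypeClassCompanion`. `Show` and `Person` are
+// hypothetical names, not part of this library.
+//
+//   trait Show[T] { def show(t: T): String }
+//
+//   object Show extends ProductTypeClassCompanion[Show] {
+//     implicit val showInt: Show[Int] = new Show[Int] { def show(i: Int) = i.toString }
+//     implicit val showString: Show[String] = new Show[String] { def show(s: String) = s }
+//
+//     implicit object typeClass extends ProductTypeClass[Show] {
+//       def emptyProduct = new Show[HNil] { def show(n: HNil) = "" }
+//       def product[H, T <: HList](sh: Show[H], st: Show[T]) = new Show[H :: T] {
+//         def show(l: H :: T) = (sh.show(l.head) + " " + st.show(l.tail)).trim
+//       }
+//       def project[F, G](instance: ⇒ Show[G], to: F ⇒ G, from: G ⇒ F) = new Show[F] {
+//         def show(f: F) = instance.show(to(f))
+//       }
+//     }
+//   }
+//
+//   case class Person(name: String, age: Int)
+//   import Show.auto._
+//   Show[Person].show(Person("Ann", 42))   // roughly "Ann 42"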
diff --git a/akka-parsing/src/main/scala/akka/shapeless/typeoperators.scala b/akka-parsing/src/main/scala/akka/shapeless/typeoperators.scala
new file mode 100644
index 0000000000..4b7d133d1e
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/typeoperators.scala
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2011 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+object tag {
+ def apply[U] = new Tagger[U]
+
+ trait Tagged[U]
+ type @@[+T, U] = T with Tagged[U]
+
+ class Tagger[U] {
+ def apply[T](t: T): T @@ U = t.asInstanceOf[T @@ U]
+ }
+}
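+
+// Illustrative usage sketch (not part of the original sources); `Meters` is a hypothetical marker trait.
+// Tagging attaches a phantom type with no wrapper allocated at runtime.
+//
+//   import akka.shapeless._, tag.@@
+//
+//   trait Meters
+//   val height: Double @@ Meters = tag[Meters](1.85)
+//   // at runtime `height` is still just a Double; the tag exists only at compile time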
+
+object newtype {
+ /**
+ * Creates a value of the newtype given a value of its representation type.
+ */
+ def apply[Repr, Ops](r: Repr): Newtype[Repr, Ops] = r.asInstanceOf[Any with Newtype[Repr, Ops]]
+
+ /**
+ * New type with `Repr` as representation type and operations provided by `Ops`.
+ *
+ * Values of the newtype will not add any additional boxing beyond what's required for
+ * values of the representation type to conform to Any. In practice this means that value
+ * types will receive their standard Scala AnyVal boxing and reference types will be unboxed.
+ */
+ type Newtype[Repr, Ops] = { type Tag = NewtypeTag[Repr, Ops] }
+ trait NewtypeTag[Repr, Ops]
+
+ /**
+ * Implicit conversion of newtype to `Ops` type for the selection of `Ops` newtype operations.
+ *
+ * The implicit conversion `Repr => Ops` would typically be provided by publishing the companion
+ * object of the `Ops` type as an implicit value.
+ */
+ implicit def newtypeOps[Repr, Ops](t: Newtype[Repr, Ops])(implicit mkOps: Repr ⇒ Ops): Ops = t.asInstanceOf[Repr]
+}
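+
+// Illustrative usage sketch (not part of the original sources); `MyString`/`MyStringOps` are hypothetical
+// names. The newtype is represented by a plain `String` at runtime, with operations supplied by `MyStringOps`.
+//
+//   import akka.shapeless._, newtype._
+//
+//   case class MyStringOps(s: String) { def twice: String = s + s }
+//   implicit val mkMyStringOps: String ⇒ MyStringOps = MyStringOps
+//
+//   type MyString = Newtype[String, MyStringOps]
+//   def MyString(s: String): MyString = newtype(s)
+//
+//   MyString("ab").twice   // "abab"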
+
+/**
+ * Type class witnessing the least upper bound of a pair of types and providing conversions from each to their common
+ * supertype.
+ *
+ * @author Miles Sabin
+ */
+trait Lub[-A, -B, +Out] {
+ def left(a: A): Out
+ def right(b: B): Out
+}
+
+object Lub {
+ implicit def lub[T] = new Lub[T, T, T] {
+ def left(a: T): T = a
+ def right(b: T): T = b
+ }
+}
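+
+// Illustrative usage sketch (not part of the original sources); `toPair` is a hypothetical helper showing
+// how `Lub` lets differently typed arguments be handled at their common supertype.
+//
+//   def toPair[A, B, L](a: A, b: B)(implicit lub: Lub[A, B, L]): (L, L) = (lub.left(a), lub.right(b))
+//
+//   toPair(1, 2.0)   // (1, 2.0), typed as (AnyVal, AnyVal)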
+
+/**
+ * Type class witnessing that type `P` is equal to `F[T]` for some higher kinded type `F[_]` and type `T`.
+ *
+ * @author Miles Sabin
+ */
+trait Unpack1[-P, F[_], T]
+
+object Unpack1 {
+ implicit def unpack1[F[_], T]: Unpack1[F[T], F, T] = new Unpack1[F[T], F, T] {}
+}
+
+/**
+ * Type class witnessing that type `P` is equal to `F[T, U]` for some higher kinded type `F[_, _]` and types `T` and `U`.
+ *
+ * @author Miles Sabin
+ */
+trait Unpack2[-P, F[_, _], T, U]
+
+object Unpack2 {
+ implicit def unpack2[F[_, _], T, U]: Unpack2[F[T, U], F, T, U] = new Unpack2[F[T, U], F, T, U] {}
+}
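+
+// Illustrative usage sketch (not part of the original sources): `Unpack1`/`Unpack2` are typically used as
+// implicit evidence that a type is an application of some type constructor.
+//
+//   implicitly[Unpack1[Option[Int], Option, Int]]             // compiles: Option[Int] is Option applied to Int
+//   implicitly[Unpack2[Map[String, Int], Map, String, Int]]   // compiles
+//   // implicitly[Unpack1[Option[Int], List, Int]]            // would not compile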
diff --git a/akka-parsing/src/main/scala/akka/shapeless/zipper.scala b/akka-parsing/src/main/scala/akka/shapeless/zipper.scala
new file mode 100644
index 0000000000..8f473e32ce
--- /dev/null
+++ b/akka-parsing/src/main/scala/akka/shapeless/zipper.scala
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2012-13 Miles Sabin
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.shapeless
+
+import ops.hlist.{ IsHCons, ReversePrepend, Split, SplitLeft }
+
+/**
+ * Generic Zipper for any type with a representation via `Generic`.
+ *
+ * @author Miles Sabin
+ */
+case class Zipper[C, L <: HList, R <: HList, P](prefix: L, suffix: R, parent: P) {
+ import ops.zipper._
+
+ type Self = Zipper[C, L, R, P]
+
+ /** Move the cursor one place to the right. Available only if not already at the rightmost element. */
+ def right(implicit right: Right[Self]): right.Out = right(this)
+
+ /** Move the cursor one place to the left. Available only if not already at the leftmost element. */
+ def left(implicit left: Left[Self]): left.Out = left(this)
+
+ /** Moves the cursor to the leftmost position. */
+ def first(implicit first: First[Self]): first.Out = first(this)
+
+ /** Moves the cursor to the rightmost position. */
+ def last(implicit last: Last[Self]): last.Out = last(this)
+
+ /**
+ * Move the cursor ''n'' places to the right. Requires an explicit type argument. Available only if there are
+ * ''n'' places to the right of the cursor.
+ */
+ def rightBy[N <: Nat](implicit rightBy: RightBy[Self, N]) = rightBy(this)
+
+ /** Move the cursor ''n'' places to the right. Available only if there are ''n'' places to the right of the cursor. */
+ def rightBy(n: Nat)(implicit rightBy: RightBy[Self, n.N]) = rightBy(this)
+
+ /**
+ * Move the cursor ''n'' places to the left. Requires an explicit type argument. Available only if there are
+ * ''n'' places to the left of the cursor.
+ */
+ def leftBy[N <: Nat](implicit leftBy: LeftBy[Self, N]) = leftBy(this)
+
+ /** Move the cursor ''n'' places to the left. Available only if there are ''n'' places to the left of the cursor. */
+ def leftBy(n: Nat)(implicit leftBy: LeftBy[Self, n.N]) = leftBy(this)
+
+ /**
+ * Move the cursor to the first element of type `T` to the right. Available only if there is an element of type `T`
+ * to the right of the cursor.
+ */
+ def rightTo[T](implicit rightTo: RightTo[Self, T]) = rightTo(this)
+
+ /**
+ * Move the cursor to the first element of type `T` to the left. Available only if there is an element of type `T`
+ * to the left of the cursor.
+ */
+ def leftTo[T](implicit leftTo: LeftTo[Self, T]) = leftTo(this)
+
+ /**
+ * Moves the cursor up to the next level. The element at the new cursor position will be updated with the
+ * reification of the current level.
+ */
+ def up(implicit up: Up[Self]): up.Out = up(this)
+
+ /**
+ * Moves the cursor down to the next level, placing it at the first element on the left. Available only if the
+ * element currently at the cursor has a representation via `Generic`.
+ */
+ def down(implicit down: Down[Self]): down.Out = down(this)
+
+ /** Moves the cursor to the root of this Zipper. */
+ def root(implicit root: Root[Self]): root.Out = root(this)
+
+ /** Returns the element at the cursor. Available only if the underlying `HList` is non-empty. */
+ def get(implicit get: Get[Self]): get.Out = get(this)
+
+ /** Replaces the element at the cursor. Available only if the underlying `HList` is non-empty. */
+ def put[E](e: E)(implicit put: Put[Self, E]): put.Out = put(this, e)
+
+ /** Inserts a new element to the left of the cursor. */
+ def insert[E](e: E)(implicit insert: Insert[Self, E]): insert.Out = insert(this, e)
+
+ /** Removes the element at the cursor. Available only if the underlying `HList` is non-empty. */
+ def delete(implicit delete: Delete[Self]): delete.Out = delete(this)
+
+ /** Reifies the current level of this `Zipper`. */
+ def reify(implicit reify: Reify[Self]): reify.Out = reify(this)
+}
+
+object Zipper {
+ def apply[C, CL <: HList](c: C)(implicit gen: Generic.Aux[C, CL]): Zipper[C, HNil, CL, None.type] =
+ Zipper[C, HNil, CL, None.type](HNil, gen.to(c), None)
+}
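+
+// Illustrative navigation sketch (not part of the original sources); `Book` is a hypothetical case class.
+//
+//   case class Book(title: String, pages: Int, inPrint: Boolean)
+//   val z = Zipper(Book("HList Tales", 320, true))
+//   z.right.get                           // 320
+//   z.rightTo[Boolean].put(false).reify   // Book("HList Tales", 320, false)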