Merge pull request #156 from jboner/wip-1504-config-comments-patriknw

Rewrite config comments
patriknw 2011-12-14 08:47:19 -08:00
commit d9e9efe2d7
46 changed files with 883 additions and 455 deletions


@ -21,7 +21,7 @@ import java.util.Set;
* is a key in a JSON object; it's just a string that's the key in a map. A
* "path" is a parseable expression with a syntax and it refers to a series of
* keys. Path expressions are described in the <a
* href="https://github.com/havocp/config/blob/master/HOCON.md">spec for
* href="https://github.com/typesafehub/config/blob/master/HOCON.md">spec for
* Human-Optimized Config Object Notation</a>. In brief, a path is
* period-separated so "a.b.c" looks for key c in object b in object a in the
* root object. Sometimes double quotes are needed around special characters in
@ -97,7 +97,7 @@ public interface Config extends ConfigMergeable {
/**
* Returns a replacement config with all substitutions (the
* <code>${foo.bar}</code> syntax, see <a
* href="https://github.com/havocp/config/blob/master/HOCON.md">the
* href="https://github.com/typesafehub/config/blob/master/HOCON.md">the
* spec</a>) resolved. Substitutions are looked up using this
* <code>Config</code> as the root object, that is, a substitution
* <code>${foo.bar}</code> will be replaced with the result of
@ -395,7 +395,8 @@ public interface Config extends ConfigMergeable {
* Gets a value as a size in bytes (parses special strings like "128M"). If
* the value is already a number, then it's left alone; if it's a string,
* it's parsed understanding unit suffixes such as "128K", as documented in
* the <a href="https://github.com/havocp/config/blob/master/HOCON.md">the
* the <a
* href="https://github.com/typesafehub/config/blob/master/HOCON.md">the
* spec</a>.
*
* @param path
@ -414,7 +415,7 @@ public interface Config extends ConfigMergeable {
* Get value as a duration in milliseconds. If the value is already a
* number, then it's left alone; if it's a string, it's parsed understanding
* units suffixes like "10m" or "5ns" as documented in the <a
* href="https://github.com/havocp/config/blob/master/HOCON.md">the
* href="https://github.com/typesafehub/config/blob/master/HOCON.md">the
* spec</a>.
*
* @param path
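For orientation, a minimal sketch (not part of this commit) of how the two accessors documented above behave, assuming a Config built with ConfigFactory.parseString and the getBytes/getMilliseconds methods of this version of the API:

    import com.typesafe.config.Config;
    import com.typesafe.config.ConfigFactory;

    public class SizeAndDurationExample {
        public static void main(String[] args) {
            Config conf = ConfigFactory.parseString(
                    "disk.cache = 128K\n" +
                    "poll.interval = 10m\n");
            // size string parsed using the unit suffixes from the HOCON spec
            long cacheBytes = conf.getBytes("disk.cache");
            // duration string converted to milliseconds ("10m" meaning ten minutes)
            long pollMillis = conf.getMilliseconds("poll.interval");
            System.out.println(cacheBytes + " bytes, " + pollMillis + " ms");
        }
    }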


@ -5,7 +5,8 @@ package com.typesafe.config;
/**
* All exceptions thrown by the library are subclasses of ConfigException.
* All exceptions thrown by the library are subclasses of
* <code>ConfigException</code>.
*/
public abstract class ConfigException extends RuntimeException {
private static final long serialVersionUID = 1L;
@ -338,6 +339,9 @@ public abstract class ConfigException extends RuntimeException {
sb.append(p.problem());
sb.append(", ");
}
if (sb.length() == 0)
throw new ConfigException.BugOrBroken(
"ValidationFailed must have a non-empty list of problems");
sb.setLength(sb.length() - 2); // chop comma and space
return sb.toString();
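The loop above joins each validation problem with ", " and then trims the trailing separator. As a hedged illustration of where that message surfaces (assuming Config.checkValid is available in this version and raises ValidationFailed on a type mismatch):

    import com.typesafe.config.Config;
    import com.typesafe.config.ConfigException;
    import com.typesafe.config.ConfigFactory;

    public class ValidationMessageExample {
        public static void main(String[] args) {
            Config reference = ConfigFactory.parseString("timeout = 5");
            Config app = ConfigFactory.parseString("timeout = [1, 2]"); // wrong type on purpose
            try {
                app.checkValid(reference);
            } catch (ConfigException.ValidationFailed e) {
                // the exception message typically carries the comma-joined problem list built above
                System.out.println(e.getMessage());
            }
        }
    }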


@ -295,7 +295,7 @@ public final class ConfigFactory {
/**
* Converts a Java {@link java.util.Properties} object to a
* {@link ConfigObject} using the rules documented in the <a
* href="https://github.com/havocp/config/blob/master/HOCON.md">HOCON
* href="https://github.com/typesafehub/config/blob/master/HOCON.md">HOCON
* spec</a>. The keys in the <code>Properties</code> object are split on the
* period character '.' and treated as paths. The values will all end up as
* string values. If you have both "a=foo" and "a.b=bar" in your properties
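A small sketch of the conversion described here (assuming ConfigFactory.parseProperties is the public entry point in this version; the property names below are illustrative):

    import java.util.Properties;
    import com.typesafe.config.Config;
    import com.typesafe.config.ConfigFactory;

    public class PropertiesConversionExample {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("a.b", "bar"); // the key is split on '.' into the path a.b
            props.setProperty("c", "1");     // values end up as strings, even if they look numeric
            Config conf = ConfigFactory.parseProperties(props);
            System.out.println(conf.getString("a.b") + " " + conf.getString("c"));
        }
    }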


@ -27,8 +27,8 @@ public interface ConfigMergeable {
*
* <p>
* The semantics of merging are described in the <a
* href="https://github.com/havocp/config/blob/master/HOCON.md">spec for
* HOCON</a>.
* href="https://github.com/typesafehub/config/blob/master/HOCON.md">spec
* for HOCON</a>.
*
* <p>
* Note that objects do not merge "across" non-objects; if you write
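To make the merge semantics concrete, a brief sketch (not part of this commit) using ConfigFactory.parseString: keys present in both configs keep the value from the object withFallback is called on, while missing keys are filled in from the fallback.

    import com.typesafe.config.Config;
    import com.typesafe.config.ConfigFactory;

    public class WithFallbackExample {
        public static void main(String[] args) {
            Config primary  = ConfigFactory.parseString("server { port = 8080 }");
            Config fallback = ConfigFactory.parseString("server { host = \"localhost\", port = 80 }");
            Config merged = primary.withFallback(fallback);
            System.out.println(merged.getInt("server.port"));    // 8080: primary wins
            System.out.println(merged.getString("server.host")); // "localhost": filled from the fallback
        }
    }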


@ -34,7 +34,7 @@ import java.util.Map;
* The API for a {@code ConfigObject} is in terms of keys, while the API for a
* {@link Config} is in terms of path expressions. Conceptually,
* {@code ConfigObject} is a tree of maps from keys to values, while a
* {@code ConfigObject} is a one-level map from paths to values.
* {@code Config} is a one-level map from paths to values.
*
* <p>
* Use {@link ConfigUtil#joinPath} and {@link ConfigUtil#splitPath} to convert
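A short sketch contrasting the two lookup styles (assuming parseString, root() and the ConfigUtil helpers behave as documented above; the config text is illustrative):

    import com.typesafe.config.Config;
    import com.typesafe.config.ConfigFactory;
    import com.typesafe.config.ConfigObject;
    import com.typesafe.config.ConfigUtil;

    public class KeysVersusPathsExample {
        public static void main(String[] args) {
            Config conf = ConfigFactory.parseString("a { b = 42 }");
            // Config: one lookup by path expression
            int viaPath = conf.getInt("a.b");
            // ConfigObject: nested maps, one key at a time
            ConfigObject root = conf.root();
            ConfigObject inner = (ConfigObject) root.get("a");
            Object viaKeys = inner.get("b").unwrapped();
            // ConfigUtil converts between keys and path expressions
            String joined = ConfigUtil.joinPath("a", "b"); // "a.b"
            System.out.println(viaPath + " " + viaKeys + " " + joined);
        }
    }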


@ -4,6 +4,7 @@
package com.typesafe.config;
import java.net.URL;
import java.util.List;
/**
@ -66,4 +67,16 @@ public interface ConfigOrigin {
* @return line number or -1 if none is available
*/
public int lineNumber();
/**
* Returns any comments that appeared to "go with" this place in the file.
* Often an empty list, but never null. The details of this are subject to
* change, but at the moment comments that are immediately before an array
* element or object field, with no blank line after the comment, "go with"
* that element or field.
*
* @return any comments that seemed to "go with" this origin, empty list if
* none
*/
public List<String> comments();
}
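Since this new comments() accessor is the user-facing side of the comment support added in this commit, a minimal sketch of the intended behavior (the config text and variable names are illustrative):

    import java.util.List;
    import com.typesafe.config.Config;
    import com.typesafe.config.ConfigFactory;

    public class OriginCommentsExample {
        public static void main(String[] args) {
            Config conf = ConfigFactory.parseString(
                    "# tuning knob, goes with foo\n" +
                    "foo = 1\n" +
                    "\n" +
                    "# separated from foo by a blank line, goes with bar\n" +
                    "bar = 2\n");
            // each list holds the comment text found immediately above that field
            List<String> fooComments = conf.getValue("foo").origin().comments();
            List<String> barComments = conf.getValue("bar").origin().comments();
            System.out.println(fooComments);
            System.out.println(barComments);
        }
    }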


@ -6,11 +6,13 @@ package com.typesafe.config;
/**
* A set of options related to resolving substitutions. Substitutions use the
* <code>${foo.bar}</code> syntax and are documented in the <a
* href="https://github.com/havocp/config/blob/master/HOCON.md">HOCON</a> spec.
* href="https://github.com/typesafehub/config/blob/master/HOCON.md">HOCON</a>
* spec.
* <p>
* This object is immutable, so the "setters" return a new object.
* <p>
* Here is an example of creating a custom {@code ConfigResolveOptions}:
*
* <pre>
* ConfigResolveOptions options = ConfigResolveOptions.defaults()
* .setUseSystemEnvironment(false)


@ -5,8 +5,8 @@ package com.typesafe.config;
/**
* The syntax of a character stream, <a href="http://json.org">JSON</a>, <a
* href="https://github.com/havocp/config/blob/master/HOCON.md">HOCON</a> aka
* ".conf", or <a href=
* href="https://github.com/typesafehub/config/blob/master/HOCON.md">HOCON</a>
* aka ".conf", or <a href=
* "http://download.oracle.com/javase/7/docs/api/java/util/Properties.html#load%28java.io.Reader%29"
* >Java properties</a>.
*
@ -19,8 +19,8 @@ public enum ConfigSyntax {
JSON,
/**
* The JSON-superset <a
* href="https://github.com/havocp/config/blob/master/HOCON.md">HOCON</a>
* format.
* href="https://github.com/typesafehub/config/blob/master/HOCON.md"
* >HOCON</a> format.
*/
CONF,
/**


@ -4,6 +4,10 @@ import java.util.List;
import com.typesafe.config.impl.ConfigImplUtil;
/**
* Contains static utility methods.
*
*/
public final class ConfigUtil {
private ConfigUtil() {


@ -8,9 +8,9 @@ import java.util.Map;
import com.typesafe.config.impl.ConfigImpl;
/**
* This class holds some static factory methods for building ConfigValue. See
* also ConfigFactory which has methods for parsing files and certain in-memory
* data structures.
* This class holds some static factory methods for building {@link ConfigValue}
* instances. See also {@link ConfigFactory} which has methods for parsing files
* and certain in-memory data structures.
*/
public final class ConfigValueFactory {
private ConfigValueFactory() {
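For context, a hedged sketch of the kind of factory usage this javadoc refers to (assuming fromAnyRef and fromMap are available in this version of the API):

    import java.util.Collections;
    import com.typesafe.config.ConfigObject;
    import com.typesafe.config.ConfigValue;
    import com.typesafe.config.ConfigValueFactory;

    public class ValueFactoryExample {
        public static void main(String[] args) {
            // wrap plain Java values as ConfigValue instances
            ConfigValue number = ConfigValueFactory.fromAnyRef(42);
            ConfigObject object = ConfigValueFactory.fromMap(Collections.singletonMap("answer", 42));
            System.out.println(number.unwrapped() + " " + object.toConfig().getInt("answer"));
        }
    }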


@ -111,12 +111,12 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
return ConfigValueType.OBJECT;
}
protected abstract AbstractConfigObject newCopy(ResolveStatus status,
boolean ignoresFallbacks);
protected abstract AbstractConfigObject newCopy(ResolveStatus status, boolean ignoresFallbacks,
ConfigOrigin origin);
@Override
protected AbstractConfigObject newCopy(boolean ignoresFallbacks) {
return newCopy(resolveStatus(), ignoresFallbacks);
protected AbstractConfigObject newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
return newCopy(resolveStatus(), ignoresFallbacks, origin);
}
@Override
@ -173,7 +173,7 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
return new SimpleConfigObject(mergeOrigins(this, fallback), merged, newResolveStatus,
newIgnoresFallbacks);
else if (newResolveStatus != resolveStatus() || newIgnoresFallbacks != ignoresFallbacks())
return newCopy(newResolveStatus, newIgnoresFallbacks);
return newCopy(newResolveStatus, newIgnoresFallbacks, origin());
else
return this;
}
@ -234,7 +234,7 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
}
}
if (changes == null) {
return newCopy(newResolveStatus, ignoresFallbacks());
return newCopy(newResolveStatus, ignoresFallbacks(), origin());
} else {
Map<String, AbstractConfigValue> modified = new HashMap<String, AbstractConfigValue>();
for (String k : keySet()) {
@ -306,6 +306,12 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
sb.append("# ");
sb.append(v.origin().description());
sb.append("\n");
for (String comment : v.origin().comments()) {
indent(sb, indent + 1);
sb.append("# ");
sb.append(comment);
sb.append("\n");
}
indent(sb, indent + 1);
}
v.render(sb, indent + 1, k, formatted);


@ -18,14 +18,14 @@ import com.typesafe.config.ConfigValue;
*/
abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
final private ConfigOrigin origin;
final private SimpleConfigOrigin origin;
AbstractConfigValue(ConfigOrigin origin) {
this.origin = origin;
this.origin = (SimpleConfigOrigin) origin;
}
@Override
public ConfigOrigin origin() {
public SimpleConfigOrigin origin() {
return this.origin;
}
@ -76,9 +76,7 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
return this;
}
protected AbstractConfigValue newCopy(boolean ignoresFallbacks) {
return this;
}
protected abstract AbstractConfigValue newCopy(boolean ignoresFallbacks, ConfigOrigin origin);
// this is virtualized rather than a field because only some subclasses
// really need to store the boolean, and they may be able to pack it
@ -105,6 +103,13 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
throw badMergeException();
}
public AbstractConfigValue withOrigin(ConfigOrigin origin) {
if (this.origin == origin)
return this;
else
return newCopy(ignoresFallbacks(), origin);
}
@Override
public AbstractConfigValue withFallback(ConfigMergeable mergeable) {
if (ignoresFallbacks()) {
@ -118,7 +123,7 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
AbstractConfigObject fallback = (AbstractConfigObject) other;
if (fallback.resolveStatus() == ResolveStatus.RESOLVED && fallback.isEmpty()) {
if (fallback.ignoresFallbacks())
return newCopy(true /* ignoresFallbacks */);
return newCopy(true /* ignoresFallbacks */, origin);
else
return this;
} else {
@ -128,7 +133,7 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
// falling back to a non-object doesn't merge anything, and also
// prohibits merging any objects that we fall back to later.
// so we have to switch to ignoresFallbacks mode.
return newCopy(true /* ignoresFallbacks */);
return newCopy(true /* ignoresFallbacks */, origin);
}
}
}


@ -29,4 +29,9 @@ final class ConfigBoolean extends AbstractConfigValue {
String transformToString() {
return value ? "true" : "false";
}
@Override
protected ConfigBoolean newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
return new ConfigBoolean(origin, value);
}
}


@ -107,6 +107,11 @@ final class ConfigDelayedMerge extends AbstractConfigValue implements
return ignoresFallbacks;
}
@Override
protected AbstractConfigValue newCopy(boolean newIgnoresFallbacks, ConfigOrigin newOrigin) {
return new ConfigDelayedMerge(newOrigin, stack, newIgnoresFallbacks);
}
@Override
protected final ConfigDelayedMerge mergedWithTheUnmergeable(Unmergeable fallback) {
if (ignoresFallbacks)
@ -196,6 +201,12 @@ final class ConfigDelayedMerge extends AbstractConfigValue implements
i += 1;
sb.append(v.origin().description());
sb.append("\n");
for (String comment : v.origin().comments()) {
indent(sb, indent);
sb.append("# ");
sb.append(comment);
sb.append("\n");
}
indent(sb, indent);
}


@ -49,12 +49,12 @@ class ConfigDelayedMergeObject extends AbstractConfigObject implements
}
@Override
protected ConfigDelayedMergeObject newCopy(ResolveStatus status,
boolean ignoresFallbacks) {
protected ConfigDelayedMergeObject newCopy(ResolveStatus status, boolean ignoresFallbacks,
ConfigOrigin origin) {
if (status != resolveStatus())
throw new ConfigException.BugOrBroken(
"attempt to create resolved ConfigDelayedMergeObject");
return new ConfigDelayedMergeObject(origin(), stack, ignoresFallbacks);
return new ConfigDelayedMergeObject(origin, stack, ignoresFallbacks);
}
@Override


@ -43,4 +43,9 @@ final class ConfigDouble extends ConfigNumber {
protected double doubleValue() {
return value;
}
@Override
protected ConfigDouble newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
return new ConfigDouble(origin, value, originalText);
}
}


@ -43,4 +43,9 @@ final class ConfigInt extends ConfigNumber {
protected double doubleValue() {
return value;
}
@Override
protected ConfigInt newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
return new ConfigInt(origin, value, originalText);
}
}


@ -43,4 +43,9 @@ final class ConfigLong extends ConfigNumber {
protected double doubleValue() {
return value;
}
@Override
protected ConfigLong newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
return new ConfigLong(origin, value, originalText);
}
}


@ -39,4 +39,9 @@ final class ConfigNull extends AbstractConfigValue {
protected void render(StringBuilder sb, int indent, boolean formatted) {
sb.append("null");
}
@Override
protected ConfigNull newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
return new ConfigNull(origin);
}
}


@ -11,7 +11,7 @@ abstract class ConfigNumber extends AbstractConfigValue {
// a sentence) we always have it exactly as the person typed it into the
// config file. It's purely cosmetic; equals/hashCode don't consider this
// for example.
final private String originalText;
final protected String originalText;
protected ConfigNumber(ConfigOrigin origin, String originalText) {
super(origin);


@ -34,4 +34,9 @@ final class ConfigString extends AbstractConfigValue {
protected void render(StringBuilder sb, int indent, boolean formatted) {
sb.append(ConfigImplUtil.renderJsonString(value));
}
@Override
protected ConfigString newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
return new ConfigString(origin, value);
}
}


@ -61,8 +61,8 @@ final class ConfigSubstitution extends AbstractConfigValue implements
}
@Override
protected ConfigSubstitution newCopy(boolean ignoresFallbacks) {
return new ConfigSubstitution(origin(), pieces, prefixLength, ignoresFallbacks);
protected ConfigSubstitution newCopy(boolean ignoresFallbacks, ConfigOrigin newOrigin) {
return new ConfigSubstitution(newOrigin, pieces, prefixLength, ignoresFallbacks);
}
@Override


@ -32,9 +32,53 @@ final class Parser {
return context.parse();
}
static private final class TokenWithComments {
final Token token;
final List<Token> comments;
TokenWithComments(Token token, List<Token> comments) {
this.token = token;
this.comments = comments;
}
TokenWithComments(Token token) {
this(token, Collections.<Token> emptyList());
}
TokenWithComments prepend(List<Token> earlier) {
if (this.comments.isEmpty()) {
return new TokenWithComments(token, earlier);
} else {
List<Token> merged = new ArrayList<Token>();
merged.addAll(earlier);
merged.addAll(comments);
return new TokenWithComments(token, merged);
}
}
SimpleConfigOrigin setComments(SimpleConfigOrigin origin) {
if (comments.isEmpty()) {
return origin;
} else {
List<String> newComments = new ArrayList<String>();
for (Token c : comments) {
newComments.add(Tokens.getCommentText(c));
}
return origin.setComments(newComments);
}
}
@Override
public String toString() {
// this ends up in user-visible error messages, so we don't want the
// comments
return token.toString();
}
}
static private final class ParseContext {
private int lineNumber;
final private Stack<Token> buffer;
final private Stack<TokenWithComments> buffer;
final private Iterator<Token> tokens;
final private ConfigIncluder includer;
final private ConfigIncludeContext includeContext;
@ -50,7 +94,7 @@ final class Parser {
Iterator<Token> tokens, ConfigIncluder includer,
ConfigIncludeContext includeContext) {
lineNumber = 1;
buffer = new Stack<Token>();
buffer = new Stack<TokenWithComments>();
this.tokens = tokens;
this.flavor = flavor;
this.baseOrigin = origin;
@ -60,13 +104,66 @@ final class Parser {
this.equalsCount = 0;
}
private Token nextToken() {
Token t = null;
if (buffer.isEmpty()) {
t = tokens.next();
} else {
t = buffer.pop();
private void consolidateCommentBlock(Token commentToken) {
// a comment block "goes with" the following token
// unless it's separated from it by a blank line.
// we want to build a list of newline tokens followed
// by a non-newline non-comment token; with all comments
// associated with that final non-newline non-comment token.
List<Token> newlines = new ArrayList<Token>();
List<Token> comments = new ArrayList<Token>();
Token previous = null;
Token next = commentToken;
while (true) {
if (Tokens.isNewline(next)) {
if (previous != null && Tokens.isNewline(previous)) {
// blank line; drop all comments to this point and
// start a new comment block
comments.clear();
}
newlines.add(next);
} else if (Tokens.isComment(next)) {
comments.add(next);
} else {
// a non-newline non-comment token
break;
}
previous = next;
next = tokens.next();
}
// put our concluding token in the queue with all the comments
// attached
buffer.push(new TokenWithComments(next, comments));
// now put all the newlines back in front of it
ListIterator<Token> li = newlines.listIterator(newlines.size());
while (li.hasPrevious()) {
buffer.push(new TokenWithComments(li.previous()));
}
}
private TokenWithComments popToken() {
if (buffer.isEmpty()) {
Token t = tokens.next();
if (Tokens.isComment(t)) {
consolidateCommentBlock(t);
return buffer.pop();
} else {
return new TokenWithComments(t);
}
} else {
return buffer.pop();
}
}
private TokenWithComments nextToken() {
TokenWithComments withComments = null;
withComments = popToken();
Token t = withComments.token;
if (Tokens.isProblem(t)) {
ConfigOrigin origin = t.origin();
@ -79,8 +176,7 @@ final class Parser {
message = addKeyName(message);
}
throw new ConfigException.Parse(origin, message, cause);
}
} else {
if (flavor == ConfigSyntax.JSON) {
if (Tokens.isUnquotedText(t)) {
throw parseError(addKeyName("Token not allowed in valid JSON: '"
@ -90,21 +186,25 @@ final class Parser {
}
}
return t;
return withComments;
}
}
private void putBack(Token token) {
private void putBack(TokenWithComments token) {
buffer.push(token);
}
private Token nextTokenIgnoringNewline() {
Token t = nextToken();
while (Tokens.isNewline(t)) {
private TokenWithComments nextTokenIgnoringNewline() {
TokenWithComments t = nextToken();
while (Tokens.isNewline(t.token)) {
// line number tokens have the line that was _ended_ by the
// newline, so we have to add one.
lineNumber = t.lineNumber() + 1;
lineNumber = t.token.lineNumber() + 1;
t = nextToken();
}
return t;
}
@ -116,8 +216,8 @@ final class Parser {
// is left just after the comma or the newline.
private boolean checkElementSeparator() {
if (flavor == ConfigSyntax.JSON) {
Token t = nextTokenIgnoringNewline();
if (t == Tokens.COMMA) {
TokenWithComments t = nextTokenIgnoringNewline();
if (t.token == Tokens.COMMA) {
return true;
} else {
putBack(t);
@ -125,15 +225,16 @@ final class Parser {
}
} else {
boolean sawSeparatorOrNewline = false;
Token t = nextToken();
TokenWithComments t = nextToken();
while (true) {
if (Tokens.isNewline(t)) {
if (Tokens.isNewline(t.token)) {
// newline number is the line just ended, so add one
lineNumber = t.lineNumber() + 1;
lineNumber = t.token.lineNumber() + 1;
sawSeparatorOrNewline = true;
// we want to continue to also eat
// a comma if there is one.
} else if (t == Tokens.COMMA) {
} else if (t.token == Tokens.COMMA) {
return true;
} else {
// non-newline-or-comma
@ -154,12 +255,17 @@ final class Parser {
return;
List<Token> values = null; // create only if we have value tokens
Token t = nextTokenIgnoringNewline(); // ignore a newline up front
while (Tokens.isValue(t) || Tokens.isUnquotedText(t)
|| Tokens.isSubstitution(t)) {
if (values == null)
TokenWithComments firstValueWithComments = null;
TokenWithComments t = nextTokenIgnoringNewline(); // ignore a
// newline up
// front
while (Tokens.isValue(t.token) || Tokens.isUnquotedText(t.token)
|| Tokens.isSubstitution(t.token)) {
if (values == null) {
values = new ArrayList<Token>();
values.add(t);
firstValueWithComments = t;
}
values.add(t.token);
t = nextToken(); // but don't consolidate across a newline
}
// the last one wasn't a value token
@ -168,9 +274,9 @@ final class Parser {
if (values == null)
return;
if (values.size() == 1 && Tokens.isValue(values.get(0))) {
if (values.size() == 1 && Tokens.isValue(firstValueWithComments.token)) {
// a single value token requires no consolidation
putBack(values.get(0));
putBack(firstValueWithComments);
return;
}
@ -235,7 +341,7 @@ final class Parser {
firstOrigin, minimized));
}
putBack(consolidated);
putBack(new TokenWithComments(consolidated, firstValueWithComments.comments));
}
private ConfigOrigin lineOrigin() {
@ -309,17 +415,23 @@ final class Parser {
return part + ")";
}
private AbstractConfigValue parseValue(Token token) {
if (Tokens.isValue(token)) {
return Tokens.getValue(token);
} else if (token == Tokens.OPEN_CURLY) {
return parseObject(true);
} else if (token == Tokens.OPEN_SQUARE) {
return parseArray();
private AbstractConfigValue parseValue(TokenWithComments t) {
AbstractConfigValue v;
if (Tokens.isValue(t.token)) {
v = Tokens.getValue(t.token);
} else if (t.token == Tokens.OPEN_CURLY) {
v = parseObject(true);
} else if (t.token == Tokens.OPEN_SQUARE) {
v = parseArray();
} else {
throw parseError(addQuoteSuggestion(token.toString(),
"Expecting a value but got wrong token: " + token));
throw parseError(addQuoteSuggestion(t.token.toString(),
"Expecting a value but got wrong token: " + t.token));
}
v = v.withOrigin(t.setComments(v.origin()));
return v;
}
private static AbstractConfigObject createValueUnderPath(Path path,
@ -339,24 +451,29 @@ final class Parser {
remaining = remaining.remainder();
}
}
// the setComments(null) is to ensure comments are only
// on the exact leaf node they apply to.
// a comment before "foo.bar" applies to the full setting
// "foo.bar" not also to "foo"
ListIterator<String> i = keys.listIterator(keys.size());
String deepest = i.previous();
AbstractConfigObject o = new SimpleConfigObject(value.origin(),
AbstractConfigObject o = new SimpleConfigObject(value.origin().setComments(null),
Collections.<String, AbstractConfigValue> singletonMap(
deepest, value));
while (i.hasPrevious()) {
Map<String, AbstractConfigValue> m = Collections.<String, AbstractConfigValue> singletonMap(
i.previous(), o);
o = new SimpleConfigObject(value.origin(), m);
o = new SimpleConfigObject(value.origin().setComments(null), m);
}
return o;
}
private Path parseKey(Token token) {
private Path parseKey(TokenWithComments token) {
if (flavor == ConfigSyntax.JSON) {
if (Tokens.isValueWithType(token, ConfigValueType.STRING)) {
String key = (String) Tokens.getValue(token).unwrapped();
if (Tokens.isValueWithType(token.token, ConfigValueType.STRING)) {
String key = (String) Tokens.getValue(token.token).unwrapped();
return Path.newKey(key);
} else {
throw parseError(addKeyName("Expecting close brace } or a field name here, got "
@ -364,9 +481,9 @@ final class Parser {
}
} else {
List<Token> expression = new ArrayList<Token>();
Token t = token;
while (Tokens.isValue(t) || Tokens.isUnquotedText(t)) {
expression.add(t);
TokenWithComments t = token;
while (Tokens.isValue(t.token) || Tokens.isUnquotedText(t.token)) {
expression.add(t.token);
t = nextToken(); // note: don't cross a newline
}
@ -400,13 +517,13 @@ final class Parser {
}
private void parseInclude(Map<String, AbstractConfigValue> values) {
Token t = nextTokenIgnoringNewline();
while (isUnquotedWhitespace(t)) {
TokenWithComments t = nextTokenIgnoringNewline();
while (isUnquotedWhitespace(t.token)) {
t = nextTokenIgnoringNewline();
}
if (Tokens.isValueWithType(t, ConfigValueType.STRING)) {
String name = (String) Tokens.getValue(t).unwrapped();
if (Tokens.isValueWithType(t.token, ConfigValueType.STRING)) {
String name = (String) Tokens.getValue(t.token).unwrapped();
AbstractConfigObject obj = (AbstractConfigObject) includer
.include(includeContext, name);
@ -448,8 +565,8 @@ final class Parser {
boolean lastInsideEquals = false;
while (true) {
Token t = nextTokenIgnoringNewline();
if (t == Tokens.CLOSE_CURLY) {
TokenWithComments t = nextTokenIgnoringNewline();
if (t.token == Tokens.CLOSE_CURLY) {
if (flavor == ConfigSyntax.JSON && afterComma) {
throw parseError(addQuoteSuggestion(t.toString(),
"expecting a field name after a comma, got a close brace } instead"));
@ -458,45 +575,45 @@ final class Parser {
"unbalanced close brace '}' with no open brace"));
}
break;
} else if (t == Tokens.END && !hadOpenCurly) {
} else if (t.token == Tokens.END && !hadOpenCurly) {
putBack(t);
break;
} else if (flavor != ConfigSyntax.JSON && isIncludeKeyword(t)) {
} else if (flavor != ConfigSyntax.JSON && isIncludeKeyword(t.token)) {
parseInclude(values);
afterComma = false;
} else {
Path path = parseKey(t);
Token afterKey = nextTokenIgnoringNewline();
TokenWithComments keyToken = t;
Path path = parseKey(keyToken);
TokenWithComments afterKey = nextTokenIgnoringNewline();
boolean insideEquals = false;
// path must be on-stack while we parse the value
pathStack.push(path);
Token valueToken;
TokenWithComments valueToken;
AbstractConfigValue newValue;
if (flavor == ConfigSyntax.CONF
&& afterKey == Tokens.OPEN_CURLY) {
if (flavor == ConfigSyntax.CONF && afterKey.token == Tokens.OPEN_CURLY) {
// can omit the ':' or '=' before an object value
valueToken = afterKey;
newValue = parseObject(true);
} else {
if (!isKeyValueSeparatorToken(afterKey)) {
if (!isKeyValueSeparatorToken(afterKey.token)) {
throw parseError(addQuoteSuggestion(afterKey.toString(),
"Key '" + path.render() + "' may not be followed by token: "
+ afterKey));
}
if (afterKey == Tokens.EQUALS) {
if (afterKey.token == Tokens.EQUALS) {
insideEquals = true;
equalsCount += 1;
}
consolidateValueTokens();
valueToken = nextTokenIgnoringNewline();
newValue = parseValue(valueToken);
}
newValue = parseValue(valueToken.prepend(keyToken.comments));
lastPath = pathStack.pop();
if (insideEquals) {
equalsCount -= 1;
@ -547,7 +664,7 @@ final class Parser {
afterComma = true;
} else {
t = nextTokenIgnoringNewline();
if (t == Tokens.CLOSE_CURLY) {
if (t.token == Tokens.CLOSE_CURLY) {
if (!hadOpenCurly) {
throw parseError(addQuoteSuggestion(lastPath, lastInsideEquals,
t.toString(), "unbalanced close brace '}' with no open brace"));
@ -557,7 +674,7 @@ final class Parser {
throw parseError(addQuoteSuggestion(lastPath, lastInsideEquals,
t.toString(), "Expecting close brace } or a comma, got " + t));
} else {
if (t == Tokens.END) {
if (t.token == Tokens.END) {
putBack(t);
break;
} else {
@ -567,6 +684,7 @@ final class Parser {
}
}
}
return new SimpleConfigObject(objectOrigin, values);
}
@ -577,18 +695,15 @@ final class Parser {
consolidateValueTokens();
Token t = nextTokenIgnoringNewline();
TokenWithComments t = nextTokenIgnoringNewline();
// special-case the first element
if (t == Tokens.CLOSE_SQUARE) {
if (t.token == Tokens.CLOSE_SQUARE) {
return new SimpleConfigList(arrayOrigin,
Collections.<AbstractConfigValue> emptyList());
} else if (Tokens.isValue(t)) {
} else if (Tokens.isValue(t.token) || t.token == Tokens.OPEN_CURLY
|| t.token == Tokens.OPEN_SQUARE) {
values.add(parseValue(t));
} else if (t == Tokens.OPEN_CURLY) {
values.add(parseObject(true));
} else if (t == Tokens.OPEN_SQUARE) {
values.add(parseArray());
} else {
throw parseError(addKeyName("List should have ] or a first element after the open [, instead had token: "
+ t
@ -604,7 +719,7 @@ final class Parser {
// comma (or newline equivalent) consumed
} else {
t = nextTokenIgnoringNewline();
if (t == Tokens.CLOSE_SQUARE) {
if (t.token == Tokens.CLOSE_SQUARE) {
return new SimpleConfigList(arrayOrigin, values);
} else {
throw parseError(addKeyName("List should have ended with ] or had a comma, instead had token: "
@ -619,14 +734,10 @@ final class Parser {
consolidateValueTokens();
t = nextTokenIgnoringNewline();
if (Tokens.isValue(t)) {
if (Tokens.isValue(t.token) || t.token == Tokens.OPEN_CURLY
|| t.token == Tokens.OPEN_SQUARE) {
values.add(parseValue(t));
} else if (t == Tokens.OPEN_CURLY) {
values.add(parseObject(true));
} else if (t == Tokens.OPEN_SQUARE) {
values.add(parseArray());
} else if (flavor != ConfigSyntax.JSON
&& t == Tokens.CLOSE_SQUARE) {
} else if (flavor != ConfigSyntax.JSON && t.token == Tokens.CLOSE_SQUARE) {
// we allow one trailing comma
putBack(t);
} else {
@ -640,8 +751,8 @@ final class Parser {
}
AbstractConfigValue parse() {
Token t = nextTokenIgnoringNewline();
if (t == Tokens.START) {
TokenWithComments t = nextTokenIgnoringNewline();
if (t.token == Tokens.START) {
// OK
} else {
throw new ConfigException.BugOrBroken(
@ -650,13 +761,11 @@ final class Parser {
t = nextTokenIgnoringNewline();
AbstractConfigValue result = null;
if (t == Tokens.OPEN_CURLY) {
result = parseObject(true);
} else if (t == Tokens.OPEN_SQUARE) {
result = parseArray();
if (t.token == Tokens.OPEN_CURLY || t.token == Tokens.OPEN_SQUARE) {
result = parseValue(t);
} else {
if (flavor == ConfigSyntax.JSON) {
if (t == Tokens.END) {
if (t.token == Tokens.END) {
throw parseError("Empty document");
} else {
throw parseError("Document must have an object or array at root, unexpected token: "
@ -668,11 +777,14 @@ final class Parser {
// of it, so put it back.
putBack(t);
result = parseObject(false);
// in this case we don't try to use commentsStack comments
// since they would all presumably apply to fields not the
// root object
}
}
t = nextTokenIgnoringNewline();
if (t == Tokens.END) {
if (t.token == Tokens.END) {
return result;
} else {
throw parseError("Document has trailing tokens after first object or array: "


@ -145,6 +145,14 @@ final class SimpleConfigList extends AbstractConfigValue implements ConfigList {
sb.append("# ");
sb.append(v.origin().description());
sb.append("\n");
for (String comment : v.origin().comments()) {
indent(sb, indent + 1);
sb.append("# ");
sb.append(comment);
sb.append("\n");
}
indent(sb, indent + 1);
}
v.render(sb, indent + 1, formatted);
@ -353,4 +361,9 @@ final class SimpleConfigList extends AbstractConfigValue implements ConfigList {
public ConfigValue set(int index, ConfigValue element) {
throw weAreImmutable("set");
}
@Override
protected SimpleConfigList newCopy(boolean ignoresFallbacks, ConfigOrigin newOrigin) {
return new SimpleConfigList(newOrigin, value);
}
}


@ -45,8 +45,9 @@ final class SimpleConfigObject extends AbstractConfigObject {
}
@Override
protected SimpleConfigObject newCopy(ResolveStatus newStatus, boolean newIgnoresFallbacks) {
return new SimpleConfigObject(origin(), value, newStatus, newIgnoresFallbacks);
protected SimpleConfigObject newCopy(ResolveStatus newStatus, boolean newIgnoresFallbacks,
ConfigOrigin newOrigin) {
return new SimpleConfigObject(newOrigin, value, newStatus, newIgnoresFallbacks);
}
@Override


@ -8,6 +8,7 @@ import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
@ -22,19 +23,21 @@ final class SimpleConfigOrigin implements ConfigOrigin {
final private int endLineNumber;
final private OriginType originType;
final private String urlOrNull;
final private List<String> commentsOrNull;
protected SimpleConfigOrigin(String description, int lineNumber, int endLineNumber,
OriginType originType,
String urlOrNull) {
String urlOrNull, List<String> commentsOrNull) {
this.description = description;
this.lineNumber = lineNumber;
this.endLineNumber = endLineNumber;
this.originType = originType;
this.urlOrNull = urlOrNull;
this.commentsOrNull = commentsOrNull;
}
static SimpleConfigOrigin newSimple(String description) {
return new SimpleConfigOrigin(description, -1, -1, OriginType.GENERIC, null);
return new SimpleConfigOrigin(description, -1, -1, OriginType.GENERIC, null, null);
}
static SimpleConfigOrigin newFile(String filename) {
@ -44,17 +47,17 @@ final class SimpleConfigOrigin implements ConfigOrigin {
} catch (MalformedURLException e) {
url = null;
}
return new SimpleConfigOrigin(filename, -1, -1, OriginType.FILE, url);
return new SimpleConfigOrigin(filename, -1, -1, OriginType.FILE, url, null);
}
static SimpleConfigOrigin newURL(URL url) {
String u = url.toExternalForm();
return new SimpleConfigOrigin(u, -1, -1, OriginType.URL, u);
return new SimpleConfigOrigin(u, -1, -1, OriginType.URL, u, null);
}
static SimpleConfigOrigin newResource(String resource, URL url) {
return new SimpleConfigOrigin(resource, -1, -1, OriginType.RESOURCE,
url != null ? url.toExternalForm() : null);
url != null ? url.toExternalForm() : null, null);
}
static SimpleConfigOrigin newResource(String resource) {
@ -66,13 +69,22 @@ final class SimpleConfigOrigin implements ConfigOrigin {
return this;
} else {
return new SimpleConfigOrigin(this.description, lineNumber, lineNumber,
this.originType, this.urlOrNull);
this.originType, this.urlOrNull, this.commentsOrNull);
}
}
SimpleConfigOrigin addURL(URL url) {
return new SimpleConfigOrigin(this.description, this.lineNumber, this.endLineNumber, this.originType,
url != null ? url.toExternalForm() : null);
return new SimpleConfigOrigin(this.description, this.lineNumber, this.endLineNumber,
this.originType, url != null ? url.toExternalForm() : null, this.commentsOrNull);
}
SimpleConfigOrigin setComments(List<String> comments) {
if (ConfigImplUtil.equalsHandlingNull(comments, this.commentsOrNull)) {
return this;
} else {
return new SimpleConfigOrigin(this.description, this.lineNumber, this.endLineNumber,
this.originType, this.urlOrNull, comments);
}
}
@Override
@ -172,12 +184,22 @@ final class SimpleConfigOrigin implements ConfigOrigin {
return lineNumber;
}
@Override
public List<String> comments() {
if (commentsOrNull != null) {
return commentsOrNull;
} else {
return Collections.emptyList();
}
}
static final String MERGE_OF_PREFIX = "merge of ";
private static SimpleConfigOrigin mergeTwo(SimpleConfigOrigin a, SimpleConfigOrigin b) {
String mergedDesc;
int mergedStartLine;
int mergedEndLine;
List<String> mergedComments;
OriginType mergedType;
if (a.originType == b.originType) {
@ -233,8 +255,18 @@ final class SimpleConfigOrigin implements ConfigOrigin {
mergedURL = null;
}
if (ConfigImplUtil.equalsHandlingNull(a.commentsOrNull, b.commentsOrNull)) {
mergedComments = a.commentsOrNull;
} else {
mergedComments = new ArrayList<String>();
if (a.commentsOrNull != null)
mergedComments.addAll(a.commentsOrNull);
if (b.commentsOrNull != null)
mergedComments.addAll(b.commentsOrNull);
}
return new SimpleConfigOrigin(mergedDesc, mergedStartLine, mergedEndLine, mergedType,
mergedURL);
mergedURL, mergedComments);
}
private static int similarity(SimpleConfigOrigin a, SimpleConfigOrigin b) {


@ -17,5 +17,6 @@ enum TokenType {
NEWLINE,
UNQUOTED_TEXT,
SUBSTITUTION,
PROBLEM;
PROBLEM,
COMMENT;
}


@ -168,40 +168,27 @@ final class Tokenizer {
return c != '\n' && ConfigImplUtil.isWhitespace(c);
}
private int slurpComment() {
for (;;) {
int c = nextCharRaw();
if (c == -1 || c == '\n') {
return c;
}
}
}
// get next char, skipping comments
private int nextCharSkippingComments() {
for (;;) {
int c = nextCharRaw();
private boolean startOfComment(int c) {
if (c == -1) {
return -1;
return false;
} else {
if (allowComments) {
if (c == '#') {
return slurpComment();
return true;
} else if (c == '/') {
int maybeSecondSlash = nextCharRaw();
if (maybeSecondSlash == '/') {
return slurpComment();
} else {
// we want to predictably NOT consume any chars
putBack(maybeSecondSlash);
return c;
if (maybeSecondSlash == '/') {
return true;
} else {
return false;
}
} else {
return c;
return false;
}
} else {
return c;
}
return false;
}
}
}
@ -209,7 +196,7 @@ final class Tokenizer {
// get next char, skipping non-newline whitespace
private int nextCharAfterWhitespace(WhitespaceSaver saver) {
for (;;) {
int c = nextCharSkippingComments();
int c = nextCharRaw();
if (c == -1) {
return -1;
@ -269,6 +256,27 @@ final class Tokenizer {
return ((SimpleConfigOrigin) baseOrigin).setLineNumber(lineNumber);
}
// ONE char has always been consumed, either the # or the first /, but
// not both slashes
private Token pullComment(int firstChar) {
if (firstChar == '/') {
int discard = nextCharRaw();
if (discard != '/')
throw new ConfigException.BugOrBroken("called pullComment but // not seen");
}
StringBuilder sb = new StringBuilder();
for (;;) {
int c = nextCharRaw();
if (c == -1 || c == '\n') {
putBack(c);
return Tokens.newComment(lineOrigin, sb.toString());
} else {
sb.appendCodePoint(c);
}
}
}
// chars JSON allows a number to start with
static final String firstNumberChars = "0123456789-";
// chars JSON allows to be part of a number
@ -283,7 +291,7 @@ final class Tokenizer {
private Token pullUnquotedText() {
ConfigOrigin origin = lineOrigin;
StringBuilder sb = new StringBuilder();
int c = nextCharSkippingComments();
int c = nextCharRaw();
while (true) {
if (c == -1) {
break;
@ -291,6 +299,8 @@ final class Tokenizer {
break;
} else if (isWhitespace(c)) {
break;
} else if (startOfComment(c)) {
break;
} else {
sb.appendCodePoint(c);
}
@ -310,7 +320,7 @@ final class Tokenizer {
return Tokens.newBoolean(origin, false);
}
c = nextCharSkippingComments();
c = nextCharRaw();
}
// put back the char that ended the unquoted text
@ -324,12 +334,12 @@ final class Tokenizer {
StringBuilder sb = new StringBuilder();
sb.appendCodePoint(firstChar);
boolean containedDecimalOrE = false;
int c = nextCharSkippingComments();
int c = nextCharRaw();
while (c != -1 && numberChars.indexOf(c) >= 0) {
if (c == '.' || c == 'e' || c == 'E')
containedDecimalOrE = true;
sb.appendCodePoint(c);
c = nextCharSkippingComments();
c = nextCharRaw();
}
// the last character we looked at wasn't part of the number, put it
// back
@ -382,7 +392,7 @@ final class Tokenizer {
// kind of absurdly slow, but screw it for now
char[] a = new char[4];
for (int i = 0; i < 4; ++i) {
int c = nextCharSkippingComments();
int c = nextCharRaw();
if (c == -1)
throw problem("End of input but expecting 4 hex digits for \\uXXXX escape");
a[i] = (char) c;
@ -431,14 +441,14 @@ final class Tokenizer {
private Token pullSubstitution() throws ProblemException {
// the initial '$' has already been consumed
ConfigOrigin origin = lineOrigin;
int c = nextCharSkippingComments();
int c = nextCharRaw();
if (c != '{') {
throw problem(asString(c), "'$' not followed by {, '" + asString(c)
+ "' not allowed after '$'", true /* suggestQuotes */);
}
boolean optional = false;
c = nextCharSkippingComments();
c = nextCharRaw();
if (c == '?') {
optional = true;
} else {
@ -484,6 +494,9 @@ final class Tokenizer {
return line;
} else {
Token t = null;
if (startOfComment(c)) {
t = pullComment(c);
} else {
switch (c) {
case '"':
t = pullQuotedString();
@ -525,6 +538,7 @@ final class Tokenizer {
t = pullUnquotedText();
}
}
}
if (t == null)
throw new ConfigException.BugOrBroken(
@ -548,6 +562,7 @@ final class Tokenizer {
Token whitespace = whitespaceSaver.check(t, origin, lineNumber);
if (whitespace != null)
tokens.add(whitespace);
tokens.add(t);
}


@ -52,7 +52,7 @@ final class Tokens {
@Override
public String toString() {
return "'\n'@" + lineNumber();
return "'\\n'@" + lineNumber();
}
@Override
@ -167,6 +167,45 @@ final class Tokens {
}
}
static private class Comment extends Token {
final private String text;
Comment(ConfigOrigin origin, String text) {
super(TokenType.COMMENT, origin);
this.text = text;
}
String text() {
return text;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("'#");
sb.append(text);
sb.append("' (COMMENT)");
return sb.toString();
}
@Override
protected boolean canEqual(Object other) {
return other instanceof Comment;
}
@Override
public boolean equals(Object other) {
return super.equals(other) && ((Comment) other).text.equals(text);
}
@Override
public int hashCode() {
int h = 41 * (41 + super.hashCode());
h = 41 * (h + text.hashCode());
return h;
}
}
// This is not a Value, because it requires special processing
static private class Substitution extends Token {
final private boolean optional;
@ -262,6 +301,18 @@ final class Tokens {
}
}
static boolean isComment(Token token) {
return token instanceof Comment;
}
static String getCommentText(Token token) {
if (token instanceof Comment) {
return ((Comment) token).text();
} else {
throw new ConfigException.BugOrBroken("tried to get comment text from " + token);
}
}
static boolean isUnquotedText(Token token) {
return token instanceof UnquotedText;
}
@ -316,6 +367,10 @@ final class Tokens {
return new Problem(origin, what, message, suggestQuotes, cause);
}
static Token newComment(ConfigOrigin origin, String text) {
return new Comment(origin, text);
}
static Token newUnquotedText(ConfigOrigin origin, String s) {
return new UnquotedText(origin, s);
}


@ -3,27 +3,37 @@
##############################
# This the reference config file has all the default settings.
# Make your edits/overrides in your akka.conf.
# Make your edits/overrides in your application.conf.
akka {
version = "2.0-SNAPSHOT" # Akka version, checked against the runtime version of Akka.
# Akka version, checked against the runtime version of Akka.
version = "2.0-SNAPSHOT"
home = "" # Home directory of Akka, modules in the deploy directory will be loaded
# Home directory of Akka, modules in the deploy directory will be loaded
home = ""
enabled-modules = [] # Comma separated list of the enabled modules. Options: ["cluster", "camel", "http"]
# Comma separated list of the enabled modules. Options: ["cluster", "camel", "http"]
enabled-modules = []
event-handlers = ["akka.event.Logging$DefaultLogger"] # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
loglevel = "INFO" # Options: ERROR, WARNING, INFO, DEBUG
# this level is used by the configured loggers (see "event-handlers") as soon
# Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
event-handlers = ["akka.event.Logging$DefaultLogger"]
# Log level used by the configured loggers (see "event-handlers") as soon
# as they have been started; before that, see "stdout-loglevel"
stdout-loglevel = "WARNING" # Loglevel for the very basic logger activated during AkkaApplication startup
# FIXME: Is there any sensible reason why we have 2 different log levels?
# Options: ERROR, WARNING, INFO, DEBUG
loglevel = "INFO"
logConfigOnStart = off # Log the complete configuration at INFO level when the actor system is started.
# Log level for the very basic logger activated during AkkaApplication startup
# Options: ERROR, WARNING, INFO, DEBUG
stdout-loglevel = "WARNING"
# Log the complete configuration at INFO level when the actor system is started.
# This is useful when you are uncertain of what configuration is used.
logConfigOnStart = off
extensions = [] # List FQCN of extensions which shall be loaded at actor system startup.
# List FQCN of extensions which shall be loaded at actor system startup.
# FIXME: clarify "extensions" here, "Akka Extensions (<link to docs>)"
extensions = []
# These boot classes are loaded (and created) automatically when the Akka Microkernel boots up
# Can be used to bootstrap your application(s)
@ -35,21 +45,34 @@ akka {
boot = []
actor {
provider = "akka.actor.LocalActorRefProvider"
creation-timeout = 20s # Timeout for ActorSystem.actorOf
reaper-interval = 5s # frequency with which stopping actors are prodded in case they had to be removed from their parents
timeout = 5s # Default timeout for Future based invocations
# Timeout for ActorSystem.actorOf
creation-timeout = 20s
# frequency with which stopping actors are prodded in case they had to be removed from their parents
reaper-interval = 5s
# Default timeout for Future based invocations
# - Actor: ask && ?
# - UntypedActor: ask
# - TypedActor: methods with non-void return type
serialize-messages = off # Does a deep clone of (non-primitive) messages to ensure immutability
dispatcher-shutdown-timeout = 1s # How long dispatchers by default will wait for new actors until they shut down
timeout = 5s
# Does a deep clone of (non-primitive) messages to ensure immutability
serialize-messages = off
# How long dispatchers by default will wait for new actors until they shut down
dispatcher-shutdown-timeout = 1s
deployment {
default { # deployment id pattern, e.g. /app/service-ping
# deployment id pattern, e.g. /user/service-ping
default {
router = "direct" # routing (load-balance) scheme to use
# routing (load-balance) scheme to use
# available: "direct", "round-robin", "random", "scatter-gather"
# or: fully qualified class name of the router class
# default is "direct";
@ -59,65 +82,113 @@ akka {
# supplied in the source code (overridable using create-as below)
# - target.paths: will look the paths up using actorFor and route to
# them, i.e. will not create children
router = "direct"
nr-of-instances = 1 # number of children to create in case of a non-direct router; this setting
# number of children to create in case of a non-direct router; this setting
# is ignored if target.paths is given
nr-of-instances = 1
create-as { # FIXME document 'create-as'
class = "" # fully qualified class name of recipe implementation
# FIXME document 'create-as', ticket 1511
create-as {
# fully qualified class name of recipe implementation
class = ""
}
target {
paths = [] # Alternatively to giving nr-of-instances you can specify the full paths of
# Alternatively to giving nr-of-instances you can specify the full paths of
# those actors which should be routed to. This setting takes precedence over
# nr-of-instances
paths = []
}
}
}
default-dispatcher {
type = "Dispatcher" # Must be one of the following
# Must be one of the following
# Dispatcher, (BalancingDispatcher, only valid when all actors using it are of the same type),
# A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor
name = "DefaultDispatcher" # Name used in log messages and thread names.
daemonic = off # Toggles whether the threads created by this dispatcher should be daemons or not
keep-alive-time = 60s # Keep alive time for threads
core-pool-size-min = 8 # minimum number of threads to cap factor-based core number to
core-pool-size-factor = 8.0 # No of core threads ... ceil(available processors * factor)
core-pool-size-max = 4096 # maximum number of threads to cap factor-based number to
type = "Dispatcher"
# Name used in log messages and thread names.
name = "DefaultDispatcher"
# Toggles whether the threads created by this dispatcher should be daemons or not
daemonic = off
# Keep alive time for threads
keep-alive-time = 60s
# minimum number of threads to cap factor-based core number to
core-pool-size-min = 8
# No of core threads ... ceil(available processors * factor)
core-pool-size-factor = 8.0
# maximum number of threads to cap factor-based number to
core-pool-size-max = 4096
# Hint: max-pool-size is only used for bounded task queues
max-pool-size-min = 8 # minimum number of threads to cap factor-based max number to
max-pool-size-factor = 8.0 # Max no of threads ... ceil(available processors * factor)
max-pool-size-max = 4096 # maximum number of threads to cap factor-based max number to
task-queue-size = -1 # Specifies the bounded capacity of the task queue (< 1 == unbounded)
task-queue-type = "linked" # Specifies which type of task queue will be used, can be "array" or "linked" (default)
allow-core-timeout = on # Allow core threads to time out
throughput = 5 # Throughput defines the number of messages that are processed in a batch before the
# minimum number of threads to cap factor-based max number to
max-pool-size-min = 8
# Max no of threads ... ceil(available processors * factor)
max-pool-size-factor = 8.0
# maximum number of threads to cap factor-based max number to
max-pool-size-max = 4096
# Specifies the bounded capacity of the task queue (< 1 == unbounded)
task-queue-size = -1
# Specifies which type of task queue will be used, can be "array" or "linked" (default)
task-queue-type = "linked"
# Allow core threads to time out
allow-core-timeout = on
# Throughput defines the number of messages that are processed in a batch before the
# thread is returned to the pool. Set to 1 for as fair as possible.
throughput-deadline-time = 0ms # Throughput deadline for Dispatcher, set to 0 or negative for no deadline
mailbox-capacity = -1 # If negative (or zero) then an unbounded mailbox is used (default)
throughput = 5
# Throughput deadline for Dispatcher, set to 0 or negative for no deadline
throughput-deadline-time = 0ms
# If negative (or zero) then an unbounded mailbox is used (default)
# If positive then a bounded mailbox is used and the capacity is set using the property
# NOTE: setting a mailbox to 'blocking' can be a bit dangerous, could lead to deadlock, use with care
# The following are only used for Dispatcher and only if mailbox-capacity > 0
mailbox-push-timeout-time = 10s # Specifies the timeout to add a new message to a mailbox that is full - negative number means infinite timeout
mailbox-capacity = -1
# Specifies the timeout to add a new message to a mailbox that is full -
# negative number means infinite timeout
mailbox-push-timeout-time = 10s
}
debug {
receive = off # enable function of Actor.loggable(), which is to log any received message at DEBUG level
autoreceive = off # enable DEBUG logging of all AutoReceiveMessages (Kill, PoisonPill and the like)
lifecycle = off # enable DEBUG logging of actor lifecycle changes
fsm = off # enable DEBUG logging of all LoggingFSMs for events, transitions and timers
event-stream = off # enable DEBUG logging of subscription changes on the eventStream
# enable function of Actor.loggable(), which is to log any received message at DEBUG level
receive = off
# enable DEBUG logging of all AutoReceiveMessages (Kill, PoisonPill and the like)
autoreceive = off
# enable DEBUG logging of actor lifecycle changes
lifecycle = off
# enable DEBUG logging of all LoggingFSMs for events, transitions and timers
fsm = off
# enable DEBUG logging of subscription changes on the eventStream
event-stream = off
}
# Entries for pluggable serializers and their bindings. If a binding for a specific class is not found,
# then the default serializer (Java serialization) is used.
#
serializers {
# java = "akka.serialization.JavaSerializer"
# proto = "akka.testing.ProtobufSerializer"
# sjson = "akka.testing.SJSONSerializer"
default = "akka.serialization.JavaSerializer"
}
@ -138,7 +209,6 @@ akka {
#
scheduler {
# The HashedWheelTimer (HWT) implementation from Netty is used as the default scheduler in the system.
#
# HWT does not execute the scheduled tasks on exact time.
# It will, on every tick, check if there are any tasks behind the schedule and execute them.
# You can increase or decrease the accuracy of the execution timing by specifying smaller or larger tick duration.


@ -103,30 +103,30 @@ A custom ``application.conf`` might look like this::
# Copy in parts of the reference files and modify as you please.
akka {
# Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
event-handlers = ["akka.event.slf4j.Slf4jEventHandler"]
loglevel = DEBUG # Options: ERROR, WARNING, INFO, DEBUG
# this level is used by the configured loggers (see "event-handlers") as soon
# Log level used by the configured loggers (see "event-handlers") as soon
# as they have been started; before that, see "stdout-loglevel"
stdout-loglevel = DEBUG # Loglevel for the very basic logger activated during AkkaApplication startup
# Options: ERROR, WARNING, INFO, DEBUG
loglevel = DEBUG
# Comma separated list of the enabled modules.
enabled-modules = ["camel", "remote"]
# These boot classes are loaded (and created) automatically when the Akka Microkernel boots up
# Can be used to bootstrap your application(s)
# Should be the FQN (Fully Qualified Name) of the boot class which needs to have a default constructor
boot = ["sample.camel.Boot",
"sample.myservice.Boot"]
# Log level for the very basic logger activated during AkkaApplication startup
# Options: ERROR, WARNING, INFO, DEBUG
stdout-loglevel = DEBUG
actor {
default-dispatcher {
throughput = 10 # Throughput for default Dispatcher, set to 1 for as fair as possible
# Throughput for default Dispatcher, set to 1 for as fair as possible
throughput = 10
}
}
remote {
server {
port = 2562 # The port clients should connect to. Default is 2552 (AKKA)
# The port clients should connect to. Default is 2552 (AKKA)
port = 2562
}
}
}


@ -154,8 +154,9 @@ if not specified otherwise.
akka {
actor {
default-dispatcher {
task-queue-size = 1000 # If negative (or zero) then an unbounded mailbox is used (default)
# If negative (or zero) then an unbounded mailbox is used (default)
# If positive then a bounded mailbox is used and the capacity is set to the number specified
task-queue-size = 1000
}
}
}


@ -40,7 +40,8 @@ Here you can also define the log level.
akka {
# Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
event-handlers = ["akka.event.Logging$DefaultLogger"]
loglevel = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG
# Options: ERROR, WARNING, INFO, DEBUG
loglevel = "DEBUG"
}
The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j-java`


@ -1528,7 +1528,8 @@ when camel is added to the enabled-modules list in :ref:`configuration`, for exa
akka {
...
enabled-modules = ["camel"] # Options: ["remote", "camel", "http"]
# Options: ["remote", "camel", "http"]
enabled-modules = ["camel"]
...
}


@ -16,13 +16,19 @@ object DispatcherDocSpec {
val config = """
//#my-dispatcher-config
my-dispatcher {
type = Dispatcher # Dispatcher is the name of the event-based dispatcher
daemonic = off # Toggles whether the threads created by this dispatcher should be daemons or not
core-pool-size-min = 2 # minimum number of threads to cap factor-based core number to
core-pool-size-factor = 2.0 # No of core threads ... ceil(available processors * factor)
core-pool-size-max = 10 # maximum number of threads to cap factor-based number to
throughput = 100 # Throughput defines the number of messages that are processed in a batch before the
# Dispatcher is the name of the event-based dispatcher
type = Dispatcher
# Toggles whether the threads created by this dispatcher should be daemons or not
daemonic = off
# minimum number of threads to cap factor-based core number to
core-pool-size-min = 2
# No of core threads ... ceil(available processors * factor)
core-pool-size-factor = 2.0
# maximum number of threads to cap factor-based number to
core-pool-size-max = 10
# Throughput defines the number of messages that are processed in a batch before the
# thread is returned to the pool. Set to 1 for as fair as possible.
throughput = 100
}
//#my-dispatcher-config
@ -31,8 +37,10 @@ object DispatcherDocSpec {
type = Dispatcher
core-pool-size-factor = 8.0
max-pool-size-factor = 16.0
task-queue-size = 100 # Specifies the bounded capacity of the task queue
task-queue-type = "array" # Specifies which type of task queue will be used, can be "array" or "linked" (default)
# Specifies the bounded capacity of the task queue
task-queue-size = 100
# Specifies which type of task queue will be used, can be "array" or "linked" (default)
task-queue-type = "array"
throughput = 3
}
//#my-bounded-config


@ -152,8 +152,9 @@ if not specified otherwise.
akka {
actor {
default-dispatcher {
task-queue-size = 1000 # If negative (or zero) then an unbounded mailbox is used (default)
# If negative (or zero) then an unbounded mailbox is used (default)
# If positive then a bounded mailbox is used and the capacity is set to the number specified
task-queue-size = 1000
}
}
}


@ -46,7 +46,8 @@ Here you can also define the log level.
akka {
# Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
event-handlers = ["akka.event.Logging$DefaultLogger"]
loglevel = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG
# Options: ERROR, WARNING, INFO, DEBUG
loglevel = "DEBUG"
}
The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j-scala`


@ -182,7 +182,8 @@ Akka can help you in this regard. It allows you to turn on an option for seriali
akka {
actor {
serialize-messages = on # does a deep clone of messages to ensure immutability
# does a deep clone of messages to ensure immutability
serialize-messages = on
}
}

View file

@ -3,7 +3,7 @@
##################################################
# This is the reference config file that has all the default settings.
# Make your edits/overrides in your akka.conf.
# Make your edits/overrides in your application.conf.
akka {
actor {

View file

@ -3,7 +3,7 @@
#############################################
# This is the reference config file that has all the default settings.
# Make your edits/overrides in your akka.conf.
# Make your edits/overrides in your application.conf.
akka {
actor {

View file

@ -3,19 +3,23 @@
################################################
# This is the reference config file that has all the default settings.
# Make your edits/overrides in your akka.conf.
# Make your edits/overrides in your application.conf.
akka {
actor {
mailbox {
mongodb {
# Any specified collection name will be used as a prefix for collections that use durable mongo mailboxes
uri = "mongodb://localhost/akka.mailbox" # Follow Mongo URI Spec - http://www.mongodb.org/display/DOCS/Connections
# Follow Mongo URI Spec - http://www.mongodb.org/display/DOCS/Connections
uri = "mongodb://localhost/akka.mailbox"
# Configurable timeouts for certain ops
timeout {
read = 3000ms # time to wait for a read to succeed before timing out the future
write = 3000ms # time to wait for a write to succeed before timing out the future
# time to wait for a read to succeed before timing out the future
read = 3000ms
# time to wait for a write to succeed before timing out the future
write = 3000ms
}
}
}

View file

@ -3,7 +3,7 @@
##############################################
# This is the reference config file that has all the default settings.
# Make your edits/overrides in your akka.conf.
# Make your edits/overrides in your application.conf.
akka {
actor {

View file

@ -3,7 +3,7 @@
##################################################
# This is the reference config file that has all the default settings.
# Make your edits/overrides in your akka.conf.
# Make your edits/overrides in your application.conf.
akka {
actor {

View file

@ -3,7 +3,7 @@
#####################################
# This is the reference config file that has all the default settings.
# Make your edits/overrides in your akka.conf.
# Make your edits/overrides in your application.conf.
akka {
@ -13,11 +13,13 @@ akka {
default {
remote = "" # if this is set to a valid remote address, the named actor will be deployed at that node
# if this is set to a valid remote address, the named actor will be deployed at that node
# e.g. "akka://sys@host:port"
remote = ""
target {
nodes = [] # A list of hostnames and ports for instantiating the children of a non-direct router
# A list of hostnames and ports for instantiating the children of a non-direct router
# The format should be "akka://sys@host:port", where:
# - sys is the remote actor system name
# - hostname can be either a hostname or an IP address that the remote actor should connect to
@ -25,6 +27,8 @@ akka {
# The number of actor instances to be spawned is still taken from the nr-of-instances
# setting as for local routers; the instances will be distributed round-robin among the
# given nodes.
nodes = []
}
}
}
@ -35,18 +39,25 @@ akka {
use-compression = off
secure-cookie = "" # Generate your own with '$AKKA_HOME/scripts/generate_config_with_secure_cookie.sh'
# Generate your own with '$AKKA_HOME/scripts/generate_config_with_secure_cookie.sh'
# or using 'akka.util.Crypt.generateSecureCookie'
secure-cookie = ""
remote-daemon-ack-timeout = 30s # Timeout for ACK of cluster operations, lik checking actor out etc.
# Timeout for ACK of cluster operations, like checking out actors etc.
remote-daemon-ack-timeout = 30s
use-passive-connections = on # Reuse inbound connections for outbound messages
# Reuse inbound connections for outbound messages
use-passive-connections = on
failure-detector { # accrual failure detection config
threshold = 8 # defines the failure detector threshold
# accrual failure detection config
failure-detector {
# defines the failure detector threshold
# A low threshold is prone to generate many wrong suspicions but ensures a
# quick detection in the event of a real crash. Conversely, a high threshold
# generates fewer mistakes but needs more time to detect actual crashes
threshold = 8
max-sample-size = 1000
}
@ -55,30 +66,49 @@ akka {
frequency = 1s
}
compute-grid-dispatcher { # The dispatcher used for remote system messages
name = ComputeGridDispatcher # defaults to same settings as default-dispatcher
# The dispatcher used for remote system messages
compute-grid-dispatcher {
# defaults to same settings as default-dispatcher
name = ComputeGridDispatcher
}
server {
hostname = "" # The hostname or ip to bind the remoting to, InetAddress.getLocalHost.getHostAddress is used if empty
port = 2552 # The default remote server port clients should connect to. Default is 2552 (AKKA)
message-frame-size = 1 MiB # Increase this if you want to be able to send messages with large payloads
connection-timeout = 120s # Timeout duration
require-cookie = off # Should the remote server require that it peers share the same secure-cookie (defined in the 'remote' section)?
untrusted-mode = off # Enable untrusted mode for full security of server managed actors, allows untrusted clients to connect.
backlog = 4096 # Sets the size of the connection backlog
# The hostname or IP to bind the remoting to; InetAddress.getLocalHost.getHostAddress is used if empty
hostname = ""
# The default remote server port clients should connect to. Default is 2552 (AKKA)
port = 2552
# Increase this if you want to be able to send messages with large payloads
message-frame-size = 1 MiB
# Timeout duration
connection-timeout = 120s
# Should the remote server require that its peers share the same secure-cookie (defined in the 'remote' section)?
require-cookie = off
# Enable untrusted mode for full security of server-managed actors; allows untrusted clients to connect.
untrusted-mode = off
# Sets the size of the connection backlog
backlog = 4096
}
client {
buffering {
retry-message-send-on-failure = off # Should message buffering on remote client error be used (buffer flushed on successful reconnect)
capacity = -1 # If negative (or zero) then an unbounded mailbox is used (default)
# Should message buffering on remote client error be used (buffer flushed on successful reconnect)
retry-message-send-on-failure = off
# If negative (or zero) then an unbounded mailbox is used (default)
# If positive then a bounded mailbox is used and the capacity is set using the property
capacity = -1
}
reconnect-delay = 5s
read-timeout = 3600s
message-frame-size = 1 MiB
reconnection-time-window = 600s # Maximum time window that a client should try to reconnect for
# Maximum time window that a client should try to reconnect for
reconnection-time-window = 600s
}
}
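Worth noting: the values above mix plain numbers with unit strings such as "120s" and "1 MiB", which the config library parses through its getMilliseconds and getBytes accessors, so "120s" comes back as 120000 and "1 MiB" as 1048576. A minimal sketch under that assumption (the parseString call and the paths are illustrative only):

import com.typesafe.config.ConfigFactory

object UnitsSketch extends App {
  val conf = ConfigFactory.parseString("""
    connection-timeout = 120s
    message-frame-size = 1 MiB
  """)
  println(conf.getMilliseconds("connection-timeout")) // 120000
  println(conf.getBytes("message-frame-size"))        // 1048576
}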

View file

@ -3,14 +3,16 @@
##################################
# This is the reference config file that has all the default settings.
# Make your edits/overrides in your akka.conf.
# Make your edits/overrides in your application.conf.
akka {
stm {
fair = on # Should global transactions be fair or non-fair (non fair yield better performance)
# Should global transactions be fair or non-fair (non-fair yields better performance)
fair = on
max-retries = 1000
timeout = 5s # Default timeout for blocking transactions and transaction set
# Default timeout for blocking transactions and transaction set
timeout = 5s
write-skew = on
blocking-allowed = off
interruptible = off

View file

@ -3,12 +3,15 @@
######################################
# This is the reference config file that has all the default settings.
# Make your edits/overrides in your akka.conf.
# Make your edits/overrides in your application.conf.
akka {
test {
timefactor = 1.0 # factor by which to scale timeouts during tests, e.g. to account for shared build system load
filter-leeway = 3s # duration of EventFilter.intercept waits after the block is finished until all required messages are received
single-expect-default = 3s # duration to wait in expectMsg and friends outside of within() block by default
# factor by which to scale timeouts during tests, e.g. to account for shared build system load
timefactor = 1.0
# duration that EventFilter.intercept waits after the block is finished until all required messages are received
filter-leeway = 3s
# duration to wait in expectMsg and friends outside of within() block by default
single-expect-default = 3s
}
}
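Finally, a tiny worked example of timefactor: it scales the TestKit's waiting times, so with timefactor = 2.0 the 3s single-expect-default above effectively becomes 6s. The sketch below only illustrates that arithmetic; the scaling helper name is hypothetical, not the actual TestKit method:

object TimeFactorSketch extends App {
  val timefactor = 2.0
  val singleExpectDefaultMillis = 3000L // the 3s default above
  // illustrative: the TestKit multiplies its timeouts by timefactor
  def scaled(millis: Long): Long = (millis * timefactor).toLong
  println(scaled(singleExpectDefaultMillis)) // 6000
}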