Posted to commits@brooklyn.apache.org by he...@apache.org on 2015/08/18 13:00:32 UTC

[17/64] incubator-brooklyn git commit: [BROOKLYN-162] Refactor package in ./core/util

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/task/system/internal/ExecWithLoggingHelpers.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/task/system/internal/ExecWithLoggingHelpers.java b/core/src/main/java/org/apache/brooklyn/core/util/task/system/internal/ExecWithLoggingHelpers.java
new file mode 100644
index 0000000..9e96674
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/task/system/internal/ExecWithLoggingHelpers.java
@@ -0,0 +1,202 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.task.system.internal;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PipedInputStream;
+import java.io.PipedOutputStream;
+import java.util.List;
+import java.util.Map;
+
+import org.slf4j.Logger;
+
+import brooklyn.config.ConfigKey;
+
+import org.apache.brooklyn.core.util.config.ConfigBag;
+import org.apache.brooklyn.core.util.flags.TypeCoercions;
+import org.apache.brooklyn.core.util.internal.ssh.ShellAbstractTool;
+import org.apache.brooklyn.core.util.internal.ssh.ShellTool;
+import org.apache.brooklyn.core.util.task.Tasks;
+import org.apache.brooklyn.location.basic.SshMachineLocation;
+
+import brooklyn.util.collections.MutableMap;
+import brooklyn.util.stream.StreamGobbler;
+import brooklyn.util.stream.Streams;
+import brooklyn.util.text.Strings;
+
+import com.google.common.base.Function;
+import com.google.common.base.Throwables;
+
+public abstract class ExecWithLoggingHelpers {
+
+    public static final ConfigKey<OutputStream> STDOUT = SshMachineLocation.STDOUT;
+    public static final ConfigKey<OutputStream> STDERR = SshMachineLocation.STDERR;
+    public static final ConfigKey<Boolean> NO_STDOUT_LOGGING = SshMachineLocation.NO_STDOUT_LOGGING;
+    public static final ConfigKey<Boolean> NO_STDERR_LOGGING = SshMachineLocation.NO_STDERR_LOGGING;
+    public static final ConfigKey<String> LOG_PREFIX = SshMachineLocation.LOG_PREFIX;
+
+    protected final String shortName;
+    protected Logger commandLogger = null;
+    
+    public interface ExecRunner {
+        public int exec(ShellTool ssh, Map<String,?> flags, List<String> cmds, Map<String,?> env);
+    }
+
+    protected abstract <T> T execWithTool(MutableMap<String, Object> toolCreationAndConnectionProperties, Function<ShellTool, T> runMethodOnTool);
+    protected abstract void preExecChecks();
+    protected abstract String getTargetName();
+    protected abstract String constructDefaultLoggingPrefix(ConfigBag execFlags);
+
+    /** takes a very short name for use in blocking details, e.g. SSH or Process */
+    public ExecWithLoggingHelpers(String shortName) {
+        this.shortName = shortName;
+    }
+
+    public ExecWithLoggingHelpers logger(Logger commandLogger) {
+        this.commandLogger = commandLogger;
+        return this;
+    }
+    
+    public int execScript(Map<String,?> props, String summaryForLogging, List<String> commands, Map<String,?> env) {
+        // TODO scriptHeader holds the extra commands we expect the SshTool/ShellTool to add.
+        // Would be better if we could get this from the ssh-tool, rather than assuming it will behave as
+        // we expect.
+        String scriptHeader = ShellAbstractTool.getOptionalVal(props, ShellTool.PROP_SCRIPT_HEADER);
+        
+        return execWithLogging(props, summaryForLogging, commands, env, scriptHeader, new ExecRunner() {
+                @Override public int exec(ShellTool ssh, Map<String, ?> flags, List<String> cmds, Map<String, ?> env) {
+                    return ssh.execScript(flags, cmds, env);
+                }});
+    }
+
+    protected static <T> T getOptionalVal(Map<String,?> map, ConfigKey<T> keyC) {
+        if (keyC==null) return null;
+        String key = keyC.getName();
+        if (map!=null && map.containsKey(key)) {
+            return TypeCoercions.coerce(map.get(key), keyC.getTypeToken());
+        } else {
+            return keyC.getDefaultValue();
+        }
+    }
+
+    public int execCommands(Map<String,?> props, String summaryForLogging, List<String> commands, Map<String,?> env) {
+        return execWithLogging(props, summaryForLogging, commands, env, new ExecRunner() {
+                @Override public int exec(ShellTool tool, Map<String,?> flags, List<String> cmds, Map<String,?> env) {
+                    return tool.execCommands(flags, cmds, env);
+                }});
+    }
+
+    public int execWithLogging(Map<String,?> props, final String summaryForLogging, final List<String> commands,
+            final Map<String,?> env, final ExecRunner execCommand) {
+        return execWithLogging(props, summaryForLogging, commands, env, null, execCommand);
+    }
+    
+    @SuppressWarnings("resource")
+    public int execWithLogging(Map<String,?> props, final String summaryForLogging, final List<String> commands,
+            final Map<String,?> env, String expectedCommandHeaders, final ExecRunner execCommand) {
+        if (commandLogger!=null && commandLogger.isDebugEnabled()) {
+            String allcmds = (Strings.isBlank(expectedCommandHeaders) ? "" : expectedCommandHeaders + " ; ") + Strings.join(commands, " ; ");
+            commandLogger.debug("{}, initiating "+shortName.toLowerCase()+" on machine {}{}: {}",
+                    new Object[] {summaryForLogging, getTargetName(),
+                    env!=null && !env.isEmpty() ? " (env "+env+")": "", allcmds});
+        }
+
+        if (commands.isEmpty()) {
+            if (commandLogger!=null && commandLogger.isDebugEnabled())
+                commandLogger.debug("{}, on machine {}, ending: no commands to run", summaryForLogging, getTargetName());
+            return 0;
+        }
+
+        final ConfigBag execFlags = new ConfigBag().putAll(props);
+        // some props get overridden in execFlags, so remove them from the tool flags
+        final ConfigBag toolFlags = new ConfigBag().putAll(props).removeAll(
+                LOG_PREFIX, STDOUT, STDERR, ShellTool.PROP_NO_EXTRA_OUTPUT);
+
+        execFlags.configure(ShellTool.PROP_SUMMARY, summaryForLogging);
+        
+        PipedOutputStream outO = null;
+        PipedOutputStream outE = null;
+        StreamGobbler gO=null, gE=null;
+        try {
+            preExecChecks();
+            
+            String logPrefix = execFlags.get(LOG_PREFIX);
+            if (logPrefix==null) logPrefix = constructDefaultLoggingPrefix(execFlags);
+
+            if (!execFlags.get(NO_STDOUT_LOGGING)) {
+                PipedInputStream insO = new PipedInputStream();
+                outO = new PipedOutputStream(insO);
+
+                String stdoutLogPrefix = "["+(logPrefix != null ? logPrefix+":stdout" : "stdout")+"] ";
+                gO = new StreamGobbler(insO, execFlags.get(STDOUT), commandLogger).setLogPrefix(stdoutLogPrefix);
+                gO.start();
+
+                execFlags.put(STDOUT, outO);
+            }
+
+            if (!execFlags.get(NO_STDERR_LOGGING)) {
+                PipedInputStream insE = new PipedInputStream();
+                outE = new PipedOutputStream(insE);
+
+                String stderrLogPrefix = "["+(logPrefix != null ? logPrefix+":stderr" : "stderr")+"] ";
+                gE = new StreamGobbler(insE, execFlags.get(STDERR), commandLogger).setLogPrefix(stderrLogPrefix);
+                gE.start();
+
+                execFlags.put(STDERR, outE);
+            }
+
+            Tasks.setBlockingDetails(shortName+" executing, "+summaryForLogging);
+            try {
+                return execWithTool(MutableMap.copyOf(toolFlags.getAllConfig()), new Function<ShellTool, Integer>() {
+                    public Integer apply(ShellTool tool) {
+                        int result = execCommand.exec(tool, MutableMap.copyOf(execFlags.getAllConfig()), commands, env);
+                        if (commandLogger!=null && commandLogger.isDebugEnabled()) 
+                            commandLogger.debug("{}, on machine {}, completed: return status {}",
+                                    new Object[] {summaryForLogging, getTargetName(), result});
+                        return result;
+                    }});
+
+            } finally {
+                Tasks.setBlockingDetails(null);
+            }
+
+        } catch (IOException e) {
+            if (commandLogger!=null && commandLogger.isDebugEnabled()) 
+                commandLogger.debug("{}, on machine {}, failed: {}", new Object[] {summaryForLogging, getTargetName(), e});
+            throw Throwables.propagate(e);
+        } finally {
+            // Must close the pipedOutStreams, otherwise input will never read -1 and the StreamGobbler thread would never die
+            if (outO!=null) try { outO.flush(); } catch (IOException e) {}
+            if (outE!=null) try { outE.flush(); } catch (IOException e) {}
+            Streams.closeQuietly(outO);
+            Streams.closeQuietly(outE);
+
+            try {
+                if (gE!=null) { gE.join(); }
+                if (gO!=null) { gO.join(); }
+            } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
+                Throwables.propagate(e);
+            }
+        }
+
+    }
+
+}
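
For orientation, a minimal sketch (not taken from the patch) of how the abstract hooks above can be
filled in for local execution, mirroring the anonymous subclass in SystemProcessTaskFactory further
down; the "local.exec" prefix, the logger name and the "ls /tmp" command are illustrative only:

    // imports as in the file above, plus org.apache.brooklyn.core.util.internal.ssh.process.ProcessTool,
    // java.util.Arrays, java.util.Collections and org.slf4j.LoggerFactory
    ExecWithLoggingHelpers localExec = new ExecWithLoggingHelpers("Process") {
        @Override protected <T> T execWithTool(MutableMap<String, Object> props, Function<ShellTool, T> task) {
            return task.apply(new ProcessTool());   // run against the local machine
        }
        @Override protected void preExecChecks() {}
        @Override protected String constructDefaultLoggingPrefix(ConfigBag execFlags) { return "local.exec"; }
        @Override protected String getTargetName() { return "local host"; }
    }.logger(LoggerFactory.getLogger("example.exec"));

    int exitCode = localExec.execCommands(Collections.<String,Object>emptyMap(),
            "list temp dir", Arrays.asList("ls /tmp"), Collections.<String,Object>emptyMap());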

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/task/system/internal/SystemProcessTaskFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/task/system/internal/SystemProcessTaskFactory.java b/core/src/main/java/org/apache/brooklyn/core/util/task/system/internal/SystemProcessTaskFactory.java
new file mode 100644
index 0000000..4a6dacb
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/task/system/internal/SystemProcessTaskFactory.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.task.system.internal;
+
+import java.io.File;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.brooklyn.core.util.config.ConfigBag;
+import org.apache.brooklyn.core.util.internal.ssh.ShellTool;
+import org.apache.brooklyn.core.util.internal.ssh.process.ProcessTool;
+import org.apache.brooklyn.core.util.task.system.ProcessTaskWrapper;
+import org.apache.brooklyn.location.basic.SshMachineLocation;
+
+import brooklyn.util.collections.MutableMap;
+
+import com.google.common.base.Function;
+
+public class SystemProcessTaskFactory<T extends SystemProcessTaskFactory<T,RET>,RET> extends AbstractProcessTaskFactory<T, RET> {
+
+    private static final Logger log = LoggerFactory.getLogger(SystemProcessTaskFactory.class);
+    
+    // FIXME Plumb this through?!
+    private File directory;
+    private Boolean loginShell;
+
+    public SystemProcessTaskFactory(String ...commands) {
+        super(commands);
+    }
+    
+    public T directory(File directory) {
+        markDirty();
+        this.directory = directory;
+        return self();
+    }
+    
+    public T loginShell(boolean loginShell) {
+        markDirty();
+        this.loginShell = loginShell;
+        return self();
+    }
+    
+    @Override
+    public T machine(SshMachineLocation machine) {
+        log.warn("Not permitted to set machines on "+this+" (ignoring - "+machine+")");
+        if (log.isDebugEnabled())
+            log.debug("Source of attempt to set machines on "+this+" ("+machine+")",
+                    new Throwable("Source of attempt to set machines on "+this+" ("+machine+")"));
+        return self();
+    }
+
+    @Override
+    public ProcessTaskWrapper<RET> newTask() {
+        return new SystemProcessTaskWrapper();
+    }
+
+    protected class SystemProcessTaskWrapper extends ProcessTaskWrapper<RET> {
+        protected final String taskTypeShortName;
+        
+        public SystemProcessTaskWrapper() {
+            this("Process");
+        }
+        public SystemProcessTaskWrapper(String taskTypeShortName) {
+            super(SystemProcessTaskFactory.this);
+            this.taskTypeShortName = taskTypeShortName;
+        }
+        @Override
+        protected ConfigBag getConfigForRunning() {
+            ConfigBag result = super.getConfigForRunning();
+            if (directory != null) config.put(ProcessTool.PROP_DIRECTORY, directory.getAbsolutePath());
+            if (loginShell != null) config.put(ProcessTool.PROP_LOGIN_SHELL, loginShell);
+            return result;
+        }
+        @Override
+        protected void run(ConfigBag config) {
+            if (Boolean.FALSE.equals(this.runAsScript)) {
+                this.exitCode = newExecWithLoggingHelpers().execCommands(config.getAllConfig(), getSummary(), getCommands(), getShellEnvironment());
+            } else { // runAsScript == null or TRUE
+                this.exitCode = newExecWithLoggingHelpers().execScript(config.getAllConfig(), getSummary(), getCommands(), getShellEnvironment());
+            }
+        }
+        @Override
+        protected String taskTypeShortName() { return taskTypeShortName; }
+    }
+    
+    protected ExecWithLoggingHelpers newExecWithLoggingHelpers() {
+        return new ExecWithLoggingHelpers("Process") {
+            @Override
+            protected <U> U execWithTool(MutableMap<String, Object> props, Function<ShellTool, U> task) {
+                // properties typically passed to both
+                if (log.isDebugEnabled() && props!=null && !props.isEmpty())
+                    log.debug("Ignoring flags "+props+" when running "+this);
+                return task.apply(new ProcessTool());
+            }
+            @Override
+            protected void preExecChecks() {}
+            @Override
+            protected String constructDefaultLoggingPrefix(ConfigBag execFlags) {
+                return "system.exec";
+            }
+            @Override
+            protected String getTargetName() {
+                return "local host";
+            }
+        }.logger(log);
+    }
+
+    /** concrete instance (for generics) */
+    public static class ConcreteSystemProcessTaskFactory<RET> extends SystemProcessTaskFactory<ConcreteSystemProcessTaskFactory<RET>, RET> {
+        public ConcreteSystemProcessTaskFactory(String ...commands) {
+            super(commands);
+        }
+    }
+    
+}
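
A hedged usage sketch for the concrete factory above; the command and the Integer type parameter are
illustrative, and the resulting wrapper still has to be submitted to a Brooklyn execution context
before its exit code is available:

    ProcessTaskWrapper<Integer> task =
            new SystemProcessTaskFactory.ConcreteSystemProcessTaskFactory<Integer>("echo hello").newTask();
    // submit 'task' via an execution context (e.g. from within an entity effector), wait for it,
    // then inspect task.getExitCode(), task.getStdout() and task.getStderr()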

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/text/DataUriSchemeParser.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/text/DataUriSchemeParser.java b/core/src/main/java/org/apache/brooklyn/core/util/text/DataUriSchemeParser.java
new file mode 100644
index 0000000..d4ed26c
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/text/DataUriSchemeParser.java
@@ -0,0 +1,267 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.text;
+
+import java.io.ByteArrayInputStream;
+import java.io.UnsupportedEncodingException;
+import java.net.MalformedURLException;
+import java.net.URLDecoder;
+import java.nio.charset.Charset;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import brooklyn.util.exceptions.Exceptions;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.io.BaseEncoding;
+//import com.sun.jersey.core.util.Base64;
+
+/** implementation (currently hokey) of RFC-2397 data: URI scheme.
+ * see: http://stackoverflow.com/questions/12353552/any-rfc-2397-data-uri-parser-for-java */
+public class DataUriSchemeParser {
+
+    public static final String PROTOCOL_PREFIX = "data:";
+    public static final String DEFAULT_MIME_TYPE = "text/plain";
+    public static final String DEFAULT_CHARSET = "US-ASCII";
+    
+    private final String url;
+    private int parseIndex = 0;
+    private boolean isParsed = false;
+    private boolean allowMissingComma = false;
+    private boolean allowSlashesAfterColon = false;
+    private boolean allowOtherLaxities = false;
+    
+    private String mimeType;
+    private byte[] data;
+    private Map<String,String> parameters = new LinkedHashMap<String,String>();
+
+    public DataUriSchemeParser(String url) {
+        this.url = Preconditions.checkNotNull(url, "url");
+    }
+
+    // ---- static conveniences -----
+    
+    public static String toString(String url) {
+        return new DataUriSchemeParser(url).lax().parse().getDataAsString();
+    }
+
+    public static byte[] toBytes(String url) {
+        return new DataUriSchemeParser(url).lax().parse().getData();
+    }
+
+    // ---- accessors (once it is parsed) -----------
+    
+    public String getCharset() {
+        String charset = parameters.get("charset");
+        if (charset!=null) return charset;
+        return DEFAULT_CHARSET;
+    }
+
+    public String getMimeType() {
+        assertParsed();
+        if (mimeType!=null) return mimeType;
+        return DEFAULT_MIME_TYPE;
+    }
+    
+    public Map<String, String> getParameters() {
+        return ImmutableMap.<String, String>copyOf(parameters);
+    }
+
+    public byte[] getData() {
+        assertParsed();
+        return data;
+    }
+    
+    public ByteArrayInputStream getDataAsInputStream() {
+        return new ByteArrayInputStream(getData());
+    }
+
+    public String getDataAsString() {
+        return new String(getData(), Charset.forName(getCharset()));
+    }
+
+    // ---- config ------------------
+    
+    public synchronized DataUriSchemeParser lax() {
+        return allowMissingComma(true).allowSlashesAfterColon(true).allowOtherLaxities(true);
+    }
+        
+    public synchronized DataUriSchemeParser allowMissingComma(boolean allowMissingComma) {
+        assertNotParsed();
+        this.allowMissingComma = allowMissingComma;
+        return this;
+    }
+    
+    public synchronized DataUriSchemeParser allowSlashesAfterColon(boolean allowSlashesAfterColon) {
+        assertNotParsed();
+        this.allowSlashesAfterColon = allowSlashesAfterColon;
+        return this;
+    }
+    
+    private synchronized DataUriSchemeParser allowOtherLaxities(boolean allowOtherLaxities) {
+        assertNotParsed();
+        this.allowOtherLaxities = allowOtherLaxities;
+        return this;
+    }
+    
+    private void assertNotParsed() {
+        if (isParsed) throw new IllegalStateException("Operation not permitted after parsing");
+    }
+
+    private void assertParsed() {
+        if (!isParsed) throw new IllegalStateException("Operation not permitted before parsing");
+    }
+
+    public synchronized DataUriSchemeParser parse() {
+        try {
+            return parseChecked();
+        } catch (Exception e) {
+            throw Exceptions.propagate(e);
+        }
+    }
+    
+    public synchronized DataUriSchemeParser parseChecked() throws UnsupportedEncodingException, MalformedURLException {
+        if (isParsed) return this;
+        
+        skipOptional(PROTOCOL_PREFIX);
+        if (allowSlashesAfterColon)
+            while (skipOptional("/")) ;
+        
+        if (allowMissingComma && remainder().indexOf(',')==-1) {
+            mimeType = DEFAULT_MIME_TYPE;
+            parameters.put("charset", DEFAULT_CHARSET);
+        } else {        
+            parseMediaType();
+            parseParameterOrParameterValues();
+            skipRequired(",");
+        }
+        
+        parseData();
+        
+        isParsed = true;
+        return this;
+    }
+
+    private void parseMediaType() throws MalformedURLException {
+        if (remainder().startsWith(";") || remainder().startsWith(","))
+            return;
+        int slash = remainder().indexOf("/");
+        if (slash==-1) throw new MalformedURLException("Missing required '/' in MIME type of data: URL");
+        String type = read(slash);
+        skipRequired("/");
+        int next = nextSemiOrComma();
+        String subtype = read(next);
+        mimeType = type+"/"+subtype;
+    }
+
+    private String read(int next) {
+        String result = remainder().substring(0, next);
+        parseIndex += next;
+        return result;
+    }
+
+    private int nextSemiOrComma() throws MalformedURLException {
+        int semi = remainder().indexOf(';');
+        int comma = remainder().indexOf(',');
+        if (semi<0 && comma<0) throw new MalformedURLException("Missing required ',' in data: URL");
+        if (semi<0) return comma;
+        if (comma<0) return semi;
+        return Math.min(semi, comma);
+    }
+
+    private void parseParameterOrParameterValues() throws MalformedURLException {
+        while (true) {
+            if (!remainder().startsWith(";")) return;
+            parseIndex++;
+            int eq = remainder().indexOf('=');
+            String word, value;
+            int nextSemiOrComma = nextSemiOrComma();
+            if (eq==-1 || eq>nextSemiOrComma) {
+                word = read(nextSemiOrComma);
+                value = null;
+            } else {
+                word = read(eq);
+                if (remainder().startsWith("\"")) {
+                    // is quoted
+                    parseIndex++;
+                    int nextUnescapedQuote = nextUnescapedQuote();
+                    value = "\"" + read(nextUnescapedQuote);
+                } else {
+                    value = read(nextSemiOrComma());
+                }
+            }
+            parameters.put(word, value);
+        }
+    }
+
+    private int nextUnescapedQuote() throws MalformedURLException {
+        int i=0;
+        String r = remainder();
+        boolean escaped = false;
+        while (i<r.length()) {
+            if (escaped) {
+                escaped = false;
+            } else {
+                if (r.charAt(i)=='"') return i;
+                if (r.charAt(i)=='\\') escaped = true;
+            }
+            i++;
+        }
+        throw new MalformedURLException("Unclosed double-quote in data: URL");
+    }
+
+    private void parseData() throws UnsupportedEncodingException, MalformedURLException {
+        if (parameters.containsKey("base64")) {
+            checkNoParamValue("base64");
+            data = BaseEncoding.base64().decode(remainder());
+        } else if (parameters.containsKey("base64url")) {
+            checkNoParamValue("base64url");
+            data = BaseEncoding.base64Url().decode(remainder());
+        } else {
+            data = URLDecoder.decode(remainder(), getCharset()).getBytes(Charset.forName(getCharset()));
+        }
+    }
+
+    private void checkNoParamValue(String param) throws MalformedURLException {
+        if (allowOtherLaxities) return; 
+        String value = parameters.get(param);
+        if (value!=null)
+            throw new MalformedURLException(param+" parameter must not take a value ("+value+") in data: URL");
+    }
+
+    private String remainder() {
+        return url.substring(parseIndex);
+    }
+
+    private boolean skipOptional(String word) {
+        if (remainder().startsWith(word)) {
+            parseIndex += word.length();
+            return true;
+        }
+        return false;
+    }
+
+    private void skipRequired(String word) throws MalformedURLException {
+        if (!remainder().startsWith(word))
+            throw new MalformedURLException("Missing required '"+word+"' at position "+parseIndex+" of data: URL");
+        parseIndex += word.length();
+    }
+
+}
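
For reference, the parser above can be exercised directly; two illustrative inputs:

    // percent-encoded text, falling back to the default text/plain and US-ASCII
    String s = DataUriSchemeParser.toString("data:,Hello%20World");   // "Hello World"
    // base64-encoded payload
    byte[] b = DataUriSchemeParser.toBytes("data:;base64,aGVsbG8=");  // bytes of "hello"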

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/text/TemplateProcessor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/text/TemplateProcessor.java b/core/src/main/java/org/apache/brooklyn/core/util/text/TemplateProcessor.java
new file mode 100644
index 0000000..6fb3c15
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/text/TemplateProcessor.java
@@ -0,0 +1,398 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.text;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.Map;
+
+import org.apache.brooklyn.api.entity.Entity;
+import org.apache.brooklyn.api.entity.drivers.EntityDriver;
+import org.apache.brooklyn.api.event.AttributeSensor;
+import org.apache.brooklyn.api.location.Location;
+import org.apache.brooklyn.api.management.ManagementContext;
+import org.apache.brooklyn.core.management.internal.ManagementContextInternal;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import brooklyn.entity.basic.ConfigKeys;
+import brooklyn.entity.basic.Entities;
+import brooklyn.entity.basic.EntityInternal;
+import brooklyn.event.basic.DependentConfiguration;
+import brooklyn.event.basic.Sensors;
+import brooklyn.util.collections.MutableMap;
+import brooklyn.util.exceptions.Exceptions;
+import brooklyn.util.text.Strings;
+
+import com.google.common.base.Charsets;
+import com.google.common.collect.Iterables;
+import com.google.common.io.Files;
+
+import freemarker.cache.StringTemplateLoader;
+import freemarker.template.Configuration;
+import freemarker.template.ObjectWrapper;
+import freemarker.template.Template;
+import freemarker.template.TemplateHashModel;
+import freemarker.template.TemplateModel;
+import freemarker.template.TemplateModelException;
+
+/** A variety of methods to assist in Freemarker template processing,
+ * including passing in maps with keys flattened (dot-separated namespace),
+ * and accessing {@link ManagementContext} brooklyn.properties 
+ * and {@link Entity}, {@link EntityDriver}, and {@link Location} methods and config.
+ * <p>
+ * See {@link #processTemplateContents(String, ManagementContextInternal, Map)} for
+ * a description of how management access is done.
+ */
+public class TemplateProcessor {
+
+    private static final Logger log = LoggerFactory.getLogger(TemplateProcessor.class);
+
+    protected static TemplateModel wrapAsTemplateModel(Object o) throws TemplateModelException {
+        if (o instanceof Map) return new DotSplittingTemplateModel((Map<?,?>)o);
+        return ObjectWrapper.DEFAULT_WRAPPER.wrap(o);
+    }
+    
+    /** @deprecated since 0.7.0 use {@link #processTemplateFile(String, Map)} */ @Deprecated
+    public static String processTemplate(String templateFileName, Map<String, ? extends Object> substitutions) {
+        return processTemplateFile(templateFileName, substitutions);
+    }
+    
+    /** As per {@link #processTemplateContents(String, Map)}, but taking a file. */
+    public static String processTemplateFile(String templateFileName, Map<String, ? extends Object> substitutions) {
+        String templateContents;
+        try {
+            templateContents = Files.toString(new File(templateFileName), Charsets.UTF_8);
+        } catch (IOException e) {
+            log.warn("Error loading file " + templateFileName, e);
+            throw Exceptions.propagate(e);
+        }
+        return processTemplateContents(templateContents, substitutions);
+    }
+
+    /** @deprecated since 0.7.0 use {@link #processTemplateFile(String, EntityDriver, Map)} */ @Deprecated
+    public static String processTemplate(String templateFileName, EntityDriver driver, Map<String, ? extends Object> extraSubstitutions) {
+        return processTemplateFile(templateFileName, driver, extraSubstitutions);
+    }
+    
+    /** Processes template contents according to {@link EntityAndMapTemplateModel}. */
+    public static String processTemplateFile(String templateFileName, EntityDriver driver, Map<String, ? extends Object> extraSubstitutions) {
+        String templateContents;
+        try {
+            templateContents = Files.toString(new File(templateFileName), Charsets.UTF_8);
+        } catch (IOException e) {
+            log.warn("Error loading file " + templateFileName, e);
+            throw Exceptions.propagate(e);
+        }
+        return processTemplateContents(templateContents, driver, extraSubstitutions);
+    }
+
+    /** Processes template contents according to {@link EntityAndMapTemplateModel}. */
+    public static String processTemplateContents(String templateContents, EntityDriver driver, Map<String,? extends Object> extraSubstitutions) {
+        return processTemplateContents(templateContents, new EntityAndMapTemplateModel(driver, extraSubstitutions));
+    }
+
+    /** Processes template contents according to {@link EntityAndMapTemplateModel}. */
+    public static String processTemplateContents(String templateContents, ManagementContext managementContext, Map<String,? extends Object> extraSubstitutions) {
+        return processTemplateContents(templateContents, new EntityAndMapTemplateModel(managementContext, extraSubstitutions));
+    }
+
+    /**
+     * A Freemarker {@link TemplateHashModel} which will correctly handle entries of the form "a.b" in this map,
+     * matching against template requests for "${a.b}".
+     * <p>
+     * For such a request, Freemarker looks up "a" in the map and expects it to point to a map
+     * with a key "b". This model synthesizes those intermediate maps even when only the flattened key "a.b" is present.
+     * <p>
+     * However if "a" <b>and</b> "a.b" are in the map, this will <b>not</b> currently do the deep mapping.
+     * (It does not have enough contextual information from Freemarker to handle this case.) */
+    public static final class DotSplittingTemplateModel implements TemplateHashModel {
+        protected final Map<?,?> map;
+
+        protected DotSplittingTemplateModel(Map<?,?> map) {
+            this.map = map;
+        }
+
+        @Override
+        public boolean isEmpty() { return map!=null && map.isEmpty(); }
+
+        public boolean contains(String key) {
+            if (map==null) return false;
+            if (map.containsKey(key)) return true;
+            for (Map.Entry<?,?> entry: map.entrySet()) {
+                String k = Strings.toString(entry.getKey());
+                if (k.startsWith(key+".")) {
+                    // contains this prefix
+                    return true;
+                }
+            }
+            return false;
+        }
+        
+        @Override
+        public TemplateModel get(String key) throws TemplateModelException {
+            if (map==null) return null;
+            try {
+                if (map.containsKey(key)) 
+                    return wrapAsTemplateModel( map.get(key) );
+                
+                Map<String,Object> result = MutableMap.of();
+                for (Map.Entry<?,?> entry: map.entrySet()) {
+                    String k = Strings.toString(entry.getKey());
+                    if (k.startsWith(key+".")) {
+                        String k2 = Strings.removeFromStart(k, key+".");
+                        result.put(k2, entry.getValue());
+                    }
+                }
+                if (!result.isEmpty()) 
+                        return wrapAsTemplateModel( result );
+                
+            } catch (Exception e) {
+                Exceptions.propagateIfFatal(e);
+                throw new IllegalStateException("Error accessing config '"+key+"'"+": "+e, e);
+            }
+            
+            return null;
+        }
+        
+        @Override
+        public String toString() {
+            return getClass().getName()+"["+map+"]";
+        }
+    }
+    
+    /** FreeMarker {@link TemplateHashModel} which resolves keys inside the given entity or management context.
+     * Callers are required to include dots for dot-separated keys.
+     * Freemarker will only do this when inside bracket notation in an outer map, as in <code>${outer['a.b']}</code>; 
+     * as a result this is intended only for use by {@link EntityAndMapTemplateModel} where 
+     * a caller has used bracket notation, as in <code>${mgmt['key.subkey']}</code>. */
+    protected static final class EntityConfigTemplateModel implements TemplateHashModel {
+        protected final EntityInternal entity;
+        protected final ManagementContext mgmt;
+
+        protected EntityConfigTemplateModel(EntityInternal entity) {
+            this.entity = entity;
+            this.mgmt = entity.getManagementContext();
+        }
+
+        protected EntityConfigTemplateModel(ManagementContext mgmt) {
+            this.entity = null;
+            this.mgmt = mgmt;
+        }
+
+        @Override
+        public boolean isEmpty() { return false; }
+
+        @Override
+        public TemplateModel get(String key) throws TemplateModelException {
+            try {
+                Object result = null;
+                
+                if (entity!=null)
+                    result = entity.getConfig(ConfigKeys.builder(Object.class).name(key).build());
+                if (result==null && mgmt!=null)
+                    result = mgmt.getConfig().getConfig(ConfigKeys.builder(Object.class).name(key).build());
+                
+                if (result!=null)
+                    return wrapAsTemplateModel( result );
+                
+            } catch (Exception e) {
+                Exceptions.propagateIfFatal(e);
+                throw new IllegalStateException("Error accessing config '"+key+"'"
+                    + (entity!=null ? " on "+entity : "")+": "+e, e);
+            }
+            
+            return null;
+        }
+        
+        @Override
+        public String toString() {
+            return getClass().getName()+"["+entity+"]";
+        }
+    }
+
+    protected final static class EntityAttributeTemplateModel implements TemplateHashModel {
+        protected final EntityInternal entity;
+
+        protected EntityAttributeTemplateModel(EntityInternal entity) {
+            this.entity = entity;
+        }
+
+        @Override
+        public boolean isEmpty() throws TemplateModelException {
+            return false;
+        }
+
+        @Override
+        public TemplateModel get(String key) throws TemplateModelException {
+            Object result;
+            try {
+                result = Entities.submit(entity, DependentConfiguration.attributeWhenReady(entity,
+                        Sensors.builder(Object.class, key).persistence(AttributeSensor.SensorPersistenceMode.NONE).build())).get();
+            } catch (Exception e) {
+                throw Exceptions.propagate(e);
+            }
+            if (result == null) {
+                return null;
+            } else {
+                return wrapAsTemplateModel(result);
+            }
+        }
+
+        @Override
+        public String toString() {
+            return getClass().getName()+"["+entity+"]";
+        }
+    }
+
+    /**
+     * Provides access to config on an entity or management context, using
+     * <code>${config['entity.config.key']}</code> or <code>${mgmt['brooklyn.properties.key']}</code> notation,
+     * and also allowing access to <code>getX()</code> methods on entity (interface) or driver
+     * using <code>${entity.x}</code> or <code>${driver.x}</code>.
+     * Optional extra properties can be supplied, treated as per {@link DotSplittingTemplateModel}.
+     */
+    protected static final class EntityAndMapTemplateModel implements TemplateHashModel {
+        protected final EntityInternal entity;
+        protected final EntityDriver driver;
+        protected final ManagementContext mgmt;
+        protected final DotSplittingTemplateModel extraSubstitutionsModel;
+
+        protected EntityAndMapTemplateModel(ManagementContext mgmt, Map<String,? extends Object> extraSubstitutions) {
+            this.entity = null;
+            this.driver = null;
+            this.mgmt = mgmt;
+            this.extraSubstitutionsModel = new DotSplittingTemplateModel(extraSubstitutions);
+        }
+
+        protected EntityAndMapTemplateModel(EntityDriver driver, Map<String,? extends Object> extraSubstitutions) {
+            this.driver = driver;
+            this.entity = (EntityInternal) driver.getEntity();
+            this.mgmt = entity.getManagementContext();
+            this.extraSubstitutionsModel = new DotSplittingTemplateModel(extraSubstitutions);
+        }
+
+        protected EntityAndMapTemplateModel(EntityInternal entity, Map<String,? extends Object> extraSubstitutions) {
+            this.entity = entity;
+            this.driver = null;
+            this.mgmt = entity.getManagementContext();
+            this.extraSubstitutionsModel = new DotSplittingTemplateModel(extraSubstitutions);
+        }
+
+        @Override
+        public boolean isEmpty() { return false; }
+
+        @Override
+        public TemplateModel get(String key) throws TemplateModelException {
+            if (extraSubstitutionsModel.contains(key))
+                return wrapAsTemplateModel( extraSubstitutionsModel.get(key) );
+
+            if ("entity".equals(key) && entity!=null)
+                return wrapAsTemplateModel( entity );
+            if ("config".equals(key)) {
+                if (entity!=null)
+                    return new EntityConfigTemplateModel(entity);
+                else
+                    return new EntityConfigTemplateModel(mgmt);
+            }
+            if ("mgmt".equals(key)) {
+                return new EntityConfigTemplateModel(mgmt);
+            }
+
+            if ("driver".equals(key) && driver!=null)
+                return wrapAsTemplateModel( driver );
+            if ("location".equals(key)) {
+                if (driver!=null && driver.getLocation()!=null)
+                    return wrapAsTemplateModel( driver.getLocation() );
+                if (entity!=null)
+                    return wrapAsTemplateModel( Iterables.getOnlyElement( entity.getLocations() ) );
+            }
+            if ("attribute".equals(key)) {
+                return new EntityAttributeTemplateModel(entity);
+            }
+            
+            if (mgmt!=null) {
+                // TODO deprecated in 0.7.0, remove after next version
+                // ie not supported to access global props without qualification
+                Object result = mgmt.getConfig().getConfig(ConfigKeys.builder(Object.class).name(key).build());
+                if (result!=null) { 
+                    log.warn("Deprecated access of global brooklyn.properties value for "+key+"; should be qualified with 'mgmt.'");
+                    return wrapAsTemplateModel( result );
+                }
+            }
+            
+            if ("javaSysProps".equals(key))
+                return wrapAsTemplateModel( System.getProperties() );
+
+            return null;
+        }
+        
+        @Override
+        public String toString() {
+            return getClass().getName()+"["+(entity!=null ? entity : mgmt)+"]";
+        }
+    }
+
+    /** Processes template contents with the given items in scope as per {@link EntityAndMapTemplateModel}. */
+    public static String processTemplateContents(String templateContents, final EntityInternal entity, Map<String,? extends Object> extraSubstitutions) {
+        return processTemplateContents(templateContents, new EntityAndMapTemplateModel(entity, extraSubstitutions));
+    }
+    
+    /** Processes template contents using the given map, passed to freemarker,
+     * with dot handling as per {@link DotSplittingTemplateModel}. */
+    public static String processTemplateContents(String templateContents, final Map<String, ? extends Object> substitutions) {
+        TemplateHashModel root;
+        try {
+            root = substitutions != null
+                ? (TemplateHashModel)wrapAsTemplateModel(substitutions)
+                : null;
+        } catch (TemplateModelException e) {
+            throw new IllegalStateException("Unable to set up TemplateHashModel to parse template, given "+substitutions+": "+e, e);
+        }
+        
+        return processTemplateContents(templateContents, root);
+    }
+    
+    /** Processes template contents against the given {@link TemplateHashModel}. */
+    public static String processTemplateContents(String templateContents, final TemplateHashModel substitutions) {
+        try {
+            Configuration cfg = new Configuration();
+            StringTemplateLoader templateLoader = new StringTemplateLoader();
+            templateLoader.putTemplate("config", templateContents);
+            cfg.setTemplateLoader(templateLoader);
+            Template template = cfg.getTemplate("config");
+
+            // TODO could expose CAMP '$brooklyn:' style dsl, based on template.createProcessingEnvironment
+            ByteArrayOutputStream baos = new ByteArrayOutputStream();
+            Writer out = new OutputStreamWriter(baos);
+            template.process(substitutions, out);
+            out.flush();
+
+            return new String(baos.toByteArray());
+        } catch (Exception e) {
+            log.warn("Error processing template (propagating): "+e, e);
+            log.debug("Template which could not be parsed (causing "+e+") is:"
+                + (Strings.isMultiLine(templateContents) ? "\n"+templateContents : templateContents));
+            throw Exceptions.propagate(e);
+        }
+    }
+}
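
As an illustration of the dot-splitting behaviour described above, a flattened key such as "foo.bar"
can be referenced with ordinary dotted syntax in the template (plain java.util collections assumed):

    Map<String, Object> props = new LinkedHashMap<String, Object>();
    props.put("foo.bar", "baz");
    String out = TemplateProcessor.processTemplateContents("hello ${foo.bar}", props);
    // out is "hello baz": DotSplittingTemplateModel exposes "foo" as a hash containing "bar"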

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/xstream/CompilerIndependentOuterClassFieldMapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/xstream/CompilerIndependentOuterClassFieldMapper.java b/core/src/main/java/org/apache/brooklyn/core/util/xstream/CompilerIndependentOuterClassFieldMapper.java
new file mode 100644
index 0000000..68c7382
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/xstream/CompilerIndependentOuterClassFieldMapper.java
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.xstream;
+
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.thoughtworks.xstream.core.Caching;
+import com.thoughtworks.xstream.mapper.Mapper;
+import com.thoughtworks.xstream.mapper.MapperWrapper;
+
+/**
+ * <p>Compiler independent outer class field mapper.</p>
+ * <p>Different compilers generate different indexes for the names of the outer class reference
+ *    field (this$N), leading to deserialization errors.</p>
+ * <ul>
+ *   <li> eclipse-[groovy-]compiler counts all outer static classes
+ *   <li> OpenJDK/Oracle/IBM compiler starts at 0, regardless of the nesting level
+ * </ul>
+ * <p>The mapper will be able to update field names for instances with a single this$N
+ *    field only (including those from parent classes).</p>
+ * <p>For difference between generated field names compare
+ *    {@code src/test/java/brooklyn/util/xstream/compiler_compatibility_eclipse.xml} and
+ *    {@code src/test/java/brooklyn/util/xstream/compiler_compatibility_oracle.xml},
+ *    generated from {@code org.apache.brooklyn.core.util.xstream.CompilerCompatibilityTest}</p>
+ * <p>JLS 1.1 relevant section, copied verbatim for lack of a reliable URL:</p>
+ * <blockquote>
+ *  <p>Java 1.1 compilers are strongly encouraged, though not required, to use the
+ *     following naming conventions when implementing inner classes. Compilers may
+ *     not use synthetic names of the forms defined here for any other purposes.</p>
+ *  <p>A synthetic field pointing to the outermost enclosing instance is named this$0.
+ *     The next-outermost enclosing instance is this$1, and so forth. (At most one such
+ *     field is necessary in any given inner class.) A synthetic field containing a copy
+ *     of a constant v is named val$v. These fields are final.</p>
+ * </blockquote>
+ * <p>Currently available at
+ *    http://web.archive.org/web/20000830111107/http://java.sun.com/products/jdk/1.1/docs/guide/innerclasses/spec/innerclasses.doc10.html</p>
+ */
+public class CompilerIndependentOuterClassFieldMapper extends MapperWrapper implements Caching {
+    public static final Logger LOG = LoggerFactory.getLogger(CompilerIndependentOuterClassFieldMapper.class);
+
+    private static final String OUTER_CLASS_FIELD_PREFIX = "this$";
+
+    private final Map<String, Collection<String>> classOuterFields = new ConcurrentHashMap<String, Collection<String>>();
+
+    public CompilerIndependentOuterClassFieldMapper(Mapper wrapped) {
+        super(wrapped);
+        classOuterFields.put(Object.class.getName(), Collections.<String>emptyList());
+    }
+
+    @Override
+    public String realMember(@SuppressWarnings("rawtypes") Class type, String serialized) {
+        // Let com.thoughtworks.xstream.mapper.OuterClassMapper also run on the input.
+        String serializedFieldName = super.realMember(type, serialized);
+
+        if (serializedFieldName.startsWith(OUTER_CLASS_FIELD_PREFIX)) {
+            Collection<String> compiledFieldNames = findOuterClassFieldNames(type);
+            if (compiledFieldNames.size() == 0) {
+                throw new IllegalStateException("Unable to find any outer class fields in " + type + ", searching specifically for " + serializedFieldName);
+            }
+
+            Set<String> uniqueFieldNames = new HashSet<String>(compiledFieldNames);
+            String deserializeFieldName;
+            if (!compiledFieldNames.contains(serializedFieldName)) {
+                String msg =
+                        "Unable to find outer class field " + serializedFieldName + " in class " + type + ". " +
+                        "This could be caused by " +
+                        "1) changing the class (or one of its parents) to a static or " +
+                        "2) moving the class to a different lexical level (enclosing classes) or " +
+                        "3) using a different compiler (i.e eclipse vs oracle) at the time the object was serialized. ";
+                if (uniqueFieldNames.size() == 1) {
+                    // Try to fix the field naming only for the case with a single field or 
+                    // multiple fields with the same name, in which case XStream puts defined-in
+                    // for the field declared in super.
+                    //
+                    // We don't have access to the XML elements from here to check for same name
+                    // so we check the target class instead. This should work most of the time, but
+                    // if code is recompiled in such a way that the new instance has fields with
+                    // different names, where only the field of the extending class is renamed and
+                    // the super field is not, then the instance will be deserialized incorrectly -
+                    // the super field will be assigned both times. If the field type is incompatible
+                    // then a casting exception will be thrown, if it's the same then only the warning
+                    // below will indicate of a possible problem down the line - most probably NPE on
+                    // the this$N field.
+                    deserializeFieldName = compiledFieldNames.iterator().next();
+                    LOG.warn(msg + "Will use the field " + deserializeFieldName + " instead.");
+                } else {
+                    // Multiple fields with differing names case - don't try to fix it.
+                    // Better fail with an explicit error, and have someone fix it manually,
+                    // than try to fix it here non-reliably and have it fail down the line
+                    // with some unrelated error.
+                    // XStream will fail later with a field not found exception.
+                    LOG.error(msg + "Will fail with a field not found exception. " +
+                            "Edit the persistence state manually and update the field names. "+
+                            "Existing field names are " + uniqueFieldNames);
+                    deserializeFieldName = serializedFieldName;
+                }
+            } else {
+                if (uniqueFieldNames.size() > 1) {
+                    // Log at debug level as the actual problem would occur in very specific cases. Only
+                    // useful when the compiler is changed, otherwise leads to false positives.
+                    LOG.debug("Deserializing the non-static class " + type + " with multiple outer class fields " + uniqueFieldNames + ". " +
+                            "When changing compilers it's possible that the instance won't be able to be deserialized due to changed outer class field names. " +
+                            "In those cases deserialization could fail with field not found exception or class cast exception following this log line.");
+                }
+                deserializeFieldName = serializedFieldName;
+            }
+
+            return deserializeFieldName;
+        } else {
+            return serializedFieldName;
+        }
+    }
+    
+    private Collection<String> findOuterClassFieldNames(Class<?> type) {
+        Collection<String> fields = classOuterFields.get(type.getName());
+        if (fields == null) {
+            fields = new ArrayList<String>();
+            addOuterClassFields(type, fields);
+            classOuterFields.put(type.getName(), fields);
+        }
+        return fields;
+    }
+    
+    private void addOuterClassFields(Class<?> type, Collection<String> fields) {
+        for (Field field : type.getDeclaredFields()) {
+            if (field.isSynthetic()) {
+                fields.add(field.getName());
+            }
+        }
+        if (type.getSuperclass() != null) {
+            addOuterClassFields(type.getSuperclass(), fields);
+        }
+    }
+
+    @Override
+    public void flushCache() {
+        classOuterFields.keySet().retainAll(Collections.singletonList(Object.class.getName()));
+    }
+
+}
\ No newline at end of file
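
One way to install a mapper wrapper such as this is XStream's wrapMapper hook; a minimal sketch,
with the surrounding serializer configuration left out:

    XStream xstream = new XStream() {
        @Override
        protected MapperWrapper wrapMapper(MapperWrapper next) {
            return new CompilerIndependentOuterClassFieldMapper(next);
        }
    };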

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/xstream/EnumCaseForgivingConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/xstream/EnumCaseForgivingConverter.java b/core/src/main/java/org/apache/brooklyn/core/util/xstream/EnumCaseForgivingConverter.java
new file mode 100644
index 0000000..7d95568
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/xstream/EnumCaseForgivingConverter.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.xstream;
+
+import brooklyn.util.exceptions.Exceptions;
+
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.converters.enums.EnumConverter;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+
+/** ... except this doesn't seem to get applied when we think it should
+ * (normal xstream.registerConverter doesn't apply to enums) */
+public class EnumCaseForgivingConverter extends EnumConverter {
+
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+        Class type = context.getRequiredType();
+        if (type.getSuperclass() != Enum.class) {
+            type = type.getSuperclass(); // polymorphic enums
+        }
+        String token = reader.getValue();
+        // this is the new bit (overriding superclass to accept case-insensitive)
+        return resolve(type, token);
+    }
+
+    public static <T extends Enum<T>> T resolve(Class<T> type, String token) {
+        try {
+            return Enum.valueOf(type, token.toUpperCase());
+        } catch (Exception e) {
+            
+            // new stuff here:  try reading case insensitive
+            
+            Exceptions.propagateIfFatal(e);
+            try {
+                for (T v: type.getEnumConstants())
+                    if (v.name().equalsIgnoreCase(token)) return v;
+                throw e;
+            } catch (Exception e2) {
+                throw Exceptions.propagate(e2);
+            }
+        }
+    }
+
+}
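
For illustration, the static resolve(...) helper accepts enum names in any case; a quick sketch using a JDK enum (not part of the diff above):

    // case-forgiving lookup: "Seconds" resolves despite not matching the constant's exact case
    java.util.concurrent.TimeUnit unit =
            EnumCaseForgivingConverter.resolve(java.util.concurrent.TimeUnit.class, "Seconds");
    // unit is TimeUnit.SECONDS; an unrecognised token is still rethrown via Exceptions.propagate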

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/xstream/EnumCaseForgivingSingleValueConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/xstream/EnumCaseForgivingSingleValueConverter.java b/core/src/main/java/org/apache/brooklyn/core/util/xstream/EnumCaseForgivingSingleValueConverter.java
new file mode 100644
index 0000000..4bc507c
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/xstream/EnumCaseForgivingSingleValueConverter.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.xstream;
+
+import com.thoughtworks.xstream.converters.enums.EnumSingleValueConverter;
+
+public class EnumCaseForgivingSingleValueConverter extends EnumSingleValueConverter {
+
+    private final Class enumType;
+
+    public EnumCaseForgivingSingleValueConverter(Class type) {
+        super(type);
+        enumType = type;
+    }
+
+    public Object fromString(String str) {
+        return EnumCaseForgivingConverter.resolve(enumType, str);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableListConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableListConverter.java b/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableListConverter.java
new file mode 100644
index 0000000..a59cfa8
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableListConverter.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.xstream;
+
+import java.util.Collection;
+
+import com.google.common.base.Predicates;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.converters.collections.CollectionConverter;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.mapper.Mapper;
+
+public class ImmutableListConverter extends CollectionConverter {
+
+    public ImmutableListConverter(Mapper mapper) {
+        super(mapper);
+    }
+
+    @Override
+    public boolean canConvert(@SuppressWarnings("rawtypes") Class type) {
+        return ImmutableList.class.isAssignableFrom(type);
+    }
+
+    // marshalling is the same
+    // so is unmarshalling the entries
+
+    // the only differences are creating the overarching collection, which we do after the fact
+    // (optimizing the format on disk as opposed to in-memory), and discarding null values
+    // so we don't fail outright (ImmutableList rejects nulls).
+    public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+        Collection<?> collection = Lists.newArrayList();
+        populateCollection(reader, context, collection);
+        return ImmutableList.copyOf(Iterables.filter(collection, Predicates.notNull()));
+    }
+}
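
A rough usage sketch, assuming a plain XStream instance (XmlSerializer further below also wires in an alias): the round trip yields an ImmutableList again, with any null entries dropped rather than failing.

    XStream xstream = new XStream();
    xstream.registerConverter(new ImmutableListConverter(xstream.getMapper()));
    String xml = xstream.toXML(ImmutableList.of("a", "b"));
    @SuppressWarnings("unchecked")
    List<String> copy = (List<String>) xstream.fromXML(xml);   // an ImmutableList; null entries, if any, are filtered out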

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableMapConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableMapConverter.java b/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableMapConverter.java
new file mode 100644
index 0000000..00e44dd
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableMapConverter.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.xstream;
+
+import java.util.Map;
+import java.util.Map.Entry;
+
+import com.google.common.base.Predicate;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.mapper.Mapper;
+
+public class ImmutableMapConverter extends MapConverter {
+
+    public ImmutableMapConverter(Mapper mapper) {
+        super(mapper);
+    }
+
+    @Override
+    public boolean canConvert(@SuppressWarnings("rawtypes") Class type) {
+        return ImmutableMap.class.isAssignableFrom(type);
+    }
+
+    // marshalling is the same
+    // so is unmarshalling the entries
+
+    // the only differences are creating the overarching map, which we do after the fact
+    // (optimizing the format on disk as opposed to in-memory), and discarding null keys/values
+    // so we don't fail outright (ImmutableMap rejects nulls).
+    public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+        Map<?, ?> map = Maps.newLinkedHashMap();
+        populateMap(reader, context, map);
+        return ImmutableMap.copyOf(Maps.filterEntries(map, new Predicate<Map.Entry<?,?>>() {
+                @Override public boolean apply(Entry<?, ?> input) {
+                    return input != null && input.getKey() != null && input.getValue() != null;
+                }}));
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableSetConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableSetConverter.java b/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableSetConverter.java
new file mode 100644
index 0000000..76975ff
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/xstream/ImmutableSetConverter.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.xstream;
+
+import java.util.Collection;
+
+import com.google.common.base.Predicates;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.converters.collections.CollectionConverter;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.mapper.Mapper;
+
+public class ImmutableSetConverter extends CollectionConverter {
+
+    public ImmutableSetConverter(Mapper mapper) {
+        super(mapper);
+    }
+
+    @Override
+    public boolean canConvert(@SuppressWarnings("rawtypes") Class type) {
+        return ImmutableSet.class.isAssignableFrom(type);
+    }
+
+    // marshalling is the same
+    // so is unmarshalling the entries
+
+    // the only differences are creating the overarching collection, which we do after the fact
+    // (optimizing the format on disk as opposed to in-memory), and discarding null values
+    // so we don't fail outright (ImmutableSet rejects nulls).
+    public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+        Collection<?> collection = Lists.newArrayList();
+        populateCollection(reader, context, collection);
+        return ImmutableSet.copyOf(Iterables.filter(collection, Predicates.notNull()));
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/xstream/Inet4AddressConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/xstream/Inet4AddressConverter.java b/core/src/main/java/org/apache/brooklyn/core/util/xstream/Inet4AddressConverter.java
new file mode 100644
index 0000000..53133c1
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/xstream/Inet4AddressConverter.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.xstream;
+
+import java.net.Inet4Address;
+import java.net.UnknownHostException;
+
+import brooklyn.util.exceptions.Exceptions;
+
+import com.thoughtworks.xstream.converters.Converter;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+
+public class Inet4AddressConverter implements Converter {
+
+    @Override
+    public boolean canConvert(@SuppressWarnings("rawtypes") Class type) {
+        return type.equals(Inet4Address.class);
+    }
+
+    @Override
+    public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+        Inet4Address addr = (Inet4Address) source;
+        writer.setValue(addr.getHostName()+"/"+addr.getHostAddress());
+    }
+
+    @Override
+    public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+        String hostSlashAddress = reader.getValue();
+        int i = hostSlashAddress.indexOf('/');
+        try {
+            if (i==-1) {
+                return Inet4Address.getByName(hostSlashAddress);
+            } else {
+                String host = hostSlashAddress.substring(0, i);
+                String addrS = hostSlashAddress.substring(i+1);
+                byte[] addr = new byte[4];
+                String[] addrSI = addrS.split("\\.");
+                for (int k=0; k<4; k++) addr[k] = (byte) Integer.parseInt(addrSI[k]);
+                return Inet4Address.getByAddress(host, addr);
+            }
+        } catch (UnknownHostException e) {
+            throw Exceptions.propagate(e);
+        }
+    }
+
+}
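
The serialized form is "hostname/dotted-quad"; a minimal round-trip sketch (the host and address below are illustrative, and getByAddress declares the checked UnknownHostException):

    XStream xstream = new XStream();
    xstream.registerConverter(new Inet4AddressConverter());
    Inet4Address addr = (Inet4Address) InetAddress.getByAddress(
            "example.com", new byte[] { (byte) 192, (byte) 168, 1, 10 });
    String xml = xstream.toXML(addr);                        // element value: example.com/192.168.1.10
    Inet4Address copy = (Inet4Address) xstream.fromXML(xml);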

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/xstream/MapConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/xstream/MapConverter.java b/core/src/main/java/org/apache/brooklyn/core/util/xstream/MapConverter.java
new file mode 100644
index 0000000..752ef70
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/xstream/MapConverter.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.xstream;
+
+import java.util.ConcurrentModificationException;
+import java.util.Iterator;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.core.ReferencingMarshallingContext;
+import com.thoughtworks.xstream.io.ExtendedHierarchicalStreamWriterHelper;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import com.thoughtworks.xstream.mapper.Mapper;
+
+/** equivalent to the superclass, but with cleaner overridable methods, logging, and fail-fast behaviour on concurrent modification so callers can retry */
+public class MapConverter extends com.thoughtworks.xstream.converters.collections.MapConverter {
+
+    private static final Logger log = LoggerFactory.getLogger(MapConverter.class);
+    
+    public MapConverter(Mapper mapper) {
+        super(mapper);
+    }
+
+    @SuppressWarnings({ "rawtypes" })
+    public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+        Map map = (Map) source;
+        try {
+            for (Iterator iterator = map.entrySet().iterator(); iterator.hasNext();) {
+                Map.Entry entry = (Map.Entry) iterator.next();
+                marshalEntry(writer, context, entry);
+            }
+        } catch (ConcurrentModificationException e) {
+            log.debug("Map "
+                // seems there is no non-deprecated way to get the path...
+                + (context instanceof ReferencingMarshallingContext ? "at "+((ReferencingMarshallingContext)context).currentPath() : "")
+                + "["+source+"] modified while serializing; will fail, and retry may be attempted");
+            throw e;
+            // it would be nice to re-serialize slightly more defensively, as in the code below;
+            // but the loop above may already have written partial data, so that risks corrupted output.
+            // if we could mark and restore the output stream we could do it below (but our stream has no such facility),
+            // or we could use the copying code in the first instance (but that is slow);
+            // the error is rare (e.g. an attribute being updated mid-serialization), so we abandon this whole attempt
+            // and simply serialize the map's owner (e.g. an entity) again.
+//            ImmutableList entries = ImmutableList.copyOf(map.entrySet());
+//            for (Iterator iterator = entries.iterator(); iterator.hasNext();) {
+//                Map.Entry entry = (Map.Entry) iterator.next();
+//                marshalEntry(writer, context, entry);                
+//            }
+        }
+    }
+
+    protected String getEntryNodeName() { return mapper().serializedClass(Map.Entry.class); }
+    
+    protected void marshalEntry(HierarchicalStreamWriter writer, MarshallingContext context, Map.Entry entry) {
+        ExtendedHierarchicalStreamWriterHelper.startNode(writer, getEntryNodeName(), Map.Entry.class);
+
+        writeItem(entry.getKey(), context, writer);
+        writeItem(entry.getValue(), context, writer);
+
+        writer.endNode();
+    }
+
+    protected void populateMap(HierarchicalStreamReader reader, UnmarshallingContext context, Map map) {
+        while (reader.hasMoreChildren()) {
+            reader.moveDown();
+            unmarshalEntry(reader, context, map);
+            reader.moveUp();
+        }
+    }
+
+    protected void unmarshalEntry(HierarchicalStreamReader reader, UnmarshallingContext context, Map map) {
+        reader.moveDown();
+        Object key = readItem(reader, context, map);
+        reader.moveUp();
+
+        reader.moveDown();
+        Object value = readItem(reader, context, map);
+        reader.moveUp();
+
+        map.put(key, value);
+    }
+
+}
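
The protected hooks are what make this class convenient to subclass; for example, a hypothetical subclass could rename the entry tag (StringKeyMapConverter below is the real example in this commit):

    XStream xstream = new XStream();
    xstream.registerConverter(new MapConverter(xstream.getMapper()) {
        // entries will be written as <kv>...</kv> instead of the default entry tag
        @Override
        protected String getEntryNodeName() { return "kv"; }
    }, /* priority */ 10);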

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/xstream/MutableSetConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/xstream/MutableSetConverter.java b/core/src/main/java/org/apache/brooklyn/core/util/xstream/MutableSetConverter.java
new file mode 100644
index 0000000..4b7bcbc
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/xstream/MutableSetConverter.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.xstream;
+
+import brooklyn.util.collections.MutableSet;
+
+import com.thoughtworks.xstream.converters.collections.CollectionConverter;
+import com.thoughtworks.xstream.mapper.Mapper;
+
+public class MutableSetConverter extends CollectionConverter {
+
+    // Although this class seems pointless (!), without an explicit converter registered for MutableSet
+    // the alias declared for Set interferes, leaving the MutableSet.map field null on deserialization.
+    
+    public MutableSetConverter(Mapper mapper) {
+        super(mapper);
+    }
+
+    @Override
+    public boolean canConvert(@SuppressWarnings("rawtypes") Class type) {
+        return MutableSet.class.isAssignableFrom(type);
+    }
+
+    @Override
+    protected Object createCollection(Class type) {
+        return new MutableSet<Object>();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/xstream/StringKeyMapConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/xstream/StringKeyMapConverter.java b/core/src/main/java/org/apache/brooklyn/core/util/xstream/StringKeyMapConverter.java
new file mode 100644
index 0000000..86a4cd0
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/xstream/StringKeyMapConverter.java
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.xstream;
+
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.brooklyn.core.util.flags.TypeCoercions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import brooklyn.util.collections.MutableMap;
+import brooklyn.util.exceptions.Exceptions;
+import brooklyn.util.text.Identifiers;
+
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.ExtendedHierarchicalStreamWriterHelper;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import com.thoughtworks.xstream.mapper.Mapper;
+
+/** Converter which simplifies the representation of maps with string keys,
+ * writing entries as {@code <key>value</key>}, or {@code <entry key="key" type="string">value</entry>}
+ * when the key is not usable as a node name
+ * @author alex
+ */
+public class StringKeyMapConverter extends MapConverter {
+
+    private static final Logger log = LoggerFactory.getLogger(StringKeyMapConverter.class);
+    
+    // a full stop is technically allowed in node names, though it goes against "best practice";
+    // it simplifies property maps, and is used elsewhere in xstream's representation
+    final static String VALID_XML_NODE_NAME_CHARS = Identifiers.JAVA_GOOD_NONSTART_CHARS + ".";
+
+    final static String VALID_XML_NODE_NAME_START_CHARS = Identifiers.JAVA_GOOD_START_CHARS + ".";
+
+    public StringKeyMapConverter(Mapper mapper) {
+        super(mapper);
+    }
+    
+    protected boolean isKeyValidForNodeName(String key) {
+        // return false here to always write entries as <entry key="key" ...>; otherwise that form is only used when the key is not a valid xml node name
+        return Identifiers.isValidToken(key, VALID_XML_NODE_NAME_START_CHARS, VALID_XML_NODE_NAME_CHARS);
+    }
+    
+    public boolean canConvert(Class type) {
+        return super.canConvert(type) || type.getName().equals(MutableMap.class.getName());
+    }
+    
+    @Override
+    protected void marshalEntry(HierarchicalStreamWriter writer, MarshallingContext context, Entry entry) {
+        if (entry.getKey() instanceof String) {
+            marshalStringKey(writer, context, entry);
+        } else {
+            super.marshalEntry(writer, context, entry);
+        }
+    }
+    
+    protected void marshalStringKey(HierarchicalStreamWriter writer, MarshallingContext context, Entry entry) {
+        String key = (String)entry.getKey();
+        String entryNodeName = getEntryNodeName();
+        boolean useKeyAsNodeName = (!key.equals(entryNodeName) && isKeyValidForNodeName(key));
+        if (useKeyAsNodeName) entryNodeName = key;
+        ExtendedHierarchicalStreamWriterHelper.startNode(writer, entryNodeName, Map.Entry.class);
+        if (!useKeyAsNodeName)
+            writer.addAttribute("key", key);
+        
+        Object value = entry.getValue();
+        if (value!=null && isInlineableType(value.getClass())) {
+            if (!(value instanceof String))
+                writer.addAttribute("type", mapper().serializedClass(value.getClass()));
+            if (value.getClass().isEnum())
+                writer.setValue(((Enum)value).name());
+            else
+                writer.setValue(""+value);
+        } else {
+            writeItem(entry.getValue(), context, writer);
+        }
+        
+        writer.endNode();
+    }
+
+    protected boolean isInlineableType(Class<?> type) {
+        return TypeCoercions.isPrimitiveOrBoxer(type) || String.class.equals(type) || type.isEnum();
+    }
+    
+    @Override
+    protected void unmarshalEntry(HierarchicalStreamReader reader, UnmarshallingContext context, Map map) {
+        String key = reader.getNodeName(); 
+        if (key.equals(getEntryNodeName())) key = reader.getAttribute("key");
+        if (key==null) {
+            super.unmarshalEntry(reader, context, map);
+        } else {
+            unmarshalStringKey(reader, context, map, key);
+        }
+    }
+
+    protected void unmarshalStringKey(HierarchicalStreamReader reader, UnmarshallingContext context, Map map, String key) {
+        String type = reader.getAttribute("type");
+        Object value;
+        if (type==null && reader.hasMoreChildren()) {
+            reader.moveDown();
+            value = readItem(reader, context, map);
+            reader.moveUp();
+        } else {
+            Class typeC = type!=null ? mapper().realClass(type) : String.class;
+            try {
+                value = TypeCoercions.coerce(reader.getValue(), typeC);
+            } catch (Exception e) {
+                log.warn("FAILED to coerce "+reader.getValue()+" to "+typeC+": "+e);
+                throw Exceptions.propagate(e);
+            }
+        }
+        map.put(key, value);
+    }
+
+}
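
A rough sketch of the simplified output, assuming the XmlSerializer configuration below (which registers this converter at priority 10) and Brooklyn's MutableMap.of helper; exact tag names depend on the registered aliases:

    Map<String, Object> props = MutableMap.<String, Object>of("name", "web", "port", 8080);
    String xml = new XmlSerializer<Map<String, Object>>().toString(props);
    // roughly:
    //   <MutableMap>
    //     <name>web</name>
    //     <port type="int">8080</port>
    //   </MutableMap>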

http://git-wip-us.apache.org/repos/asf/incubator-brooklyn/blob/699b3f65/core/src/main/java/org/apache/brooklyn/core/util/xstream/XmlSerializer.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/brooklyn/core/util/xstream/XmlSerializer.java b/core/src/main/java/org/apache/brooklyn/core/util/xstream/XmlSerializer.java
new file mode 100644
index 0000000..3b3b582
--- /dev/null
+++ b/core/src/main/java/org/apache/brooklyn/core/util/xstream/XmlSerializer.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.brooklyn.core.util.xstream;
+
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.io.Writer;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Set;
+
+import brooklyn.util.collections.MutableList;
+import brooklyn.util.collections.MutableMap;
+import brooklyn.util.collections.MutableSet;
+
+import com.google.common.collect.ImmutableList;
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.mapper.MapperWrapper;
+
+public class XmlSerializer<T> {
+
+    protected final XStream xstream;
+    
+    public XmlSerializer() {
+        xstream = new XStream() {
+            @Override
+            protected MapperWrapper wrapMapper(MapperWrapper next) {
+                MapperWrapper result = super.wrapMapper(next);
+                return XmlSerializer.this.wrapMapper(result);
+            }
+        };
+        
+        // (list, as ArrayList, is already the default in xstream, so no alias is needed)
+        xstream.alias("map", Map.class, LinkedHashMap.class);
+        xstream.alias("set", Set.class, LinkedHashSet.class);
+        
+        xstream.registerConverter(new StringKeyMapConverter(xstream.getMapper()), /* priority */ 10);
+        xstream.alias("MutableMap", MutableMap.class);
+        xstream.alias("MutableSet", MutableSet.class);
+        xstream.alias("MutableList", MutableList.class);
+        
+        // Needs an explicit MutableSet converter!
+        // Without it, the alias for "set" seems to interfere with the MutableSet.map field, so it gets
+        // a null field on deserialization.
+        xstream.registerConverter(new MutableSetConverter(xstream.getMapper()));
+        
+        xstream.aliasType("ImmutableList", ImmutableList.class);
+        xstream.registerConverter(new ImmutableListConverter(xstream.getMapper()));
+        xstream.registerConverter(new ImmutableSetConverter(xstream.getMapper()));
+        xstream.registerConverter(new ImmutableMapConverter(xstream.getMapper()));
+
+        xstream.registerConverter(new EnumCaseForgivingConverter());
+        xstream.registerConverter(new Inet4AddressConverter());
+    }
+    
+    protected MapperWrapper wrapMapper(MapperWrapper next) {
+        return new CompilerIndependentOuterClassFieldMapper(next);
+    }
+
+    public void serialize(Object object, Writer writer) {
+        xstream.toXML(object, writer);
+    }
+
+    @SuppressWarnings("unchecked")
+    public T deserialize(Reader xml) {
+        return (T) xstream.fromXML(xml);
+    }
+
+    public String toString(T memento) {
+        Writer writer = new StringWriter();
+        serialize(memento, writer);
+        return writer.toString();
+    }
+
+    public T fromString(String xml) {
+        return deserialize(new StringReader(xml));
+    }
+
+}
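
Finally, a minimal usage sketch of the serializer as wired above (the map contents are illustrative):

    XmlSerializer<Map<String, Object>> serializer = new XmlSerializer<Map<String, Object>>();
    String xml = serializer.toString(MutableMap.<String, Object>of("enabled", true));
    Map<String, Object> restored = serializer.fromString(xml);   // aliases and converters above are applied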