Posted to commits@uima.apache.org by ch...@apache.org on 2013/09/11 21:17:12 UTC

svn commit: r1521995 [2/2] - in /uima/sandbox/uima-ducc/trunk: src/main/admin/ uima-ducc-cli/src/main/java/org/apache/uima/ducc/cli/ uima-ducc-cli/src/main/java/org/apache/uima/ducc/cli/aio/ uima-ducc-common/src/main/java/org/apache/uima/ducc/common/ u...

Modified: uima/sandbox/uima-ducc/trunk/uima-ducc-rm/src/main/java/org/apache/uima/ducc/rm/scheduler/Scheduler.java
URL: http://svn.apache.org/viewvc/uima/sandbox/uima-ducc/trunk/uima-ducc-rm/src/main/java/org/apache/uima/ducc/rm/scheduler/Scheduler.java?rev=1521995&r1=1521994&r2=1521995&view=diff
==============================================================================
--- uima/sandbox/uima-ducc/trunk/uima-ducc-rm/src/main/java/org/apache/uima/ducc/rm/scheduler/Scheduler.java (original)
+++ uima/sandbox/uima-ducc/trunk/uima-ducc-rm/src/main/java/org/apache/uima/ducc/rm/scheduler/Scheduler.java Wed Sep 11 19:17:11 2013
@@ -18,22 +18,20 @@
 */
 package org.apache.uima.ducc.rm.scheduler;
 
-import java.io.BufferedReader;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 
-import org.apache.uima.ducc.common.IIdentity;
 import org.apache.uima.ducc.common.Node;
+import org.apache.uima.ducc.common.NodeConfiguration;
 import org.apache.uima.ducc.common.NodeIdentity;
 import org.apache.uima.ducc.common.Pair;
 import org.apache.uima.ducc.common.utils.DuccLogger;
 import org.apache.uima.ducc.common.utils.DuccProperties;
+import org.apache.uima.ducc.common.utils.DuccPropertiesResolver;
 import org.apache.uima.ducc.common.utils.SystemPropertyResolver;
 import org.apache.uima.ducc.common.utils.Version;
 import org.apache.uima.ducc.common.utils.id.DuccId;
@@ -60,34 +58,35 @@ public class Scheduler
     String ducc_home;
     // Integer epoch = 5;                                                 // scheduling epoch, seconds
 
-    NodePool nodepool;
+    NodePool[] nodepools;
 
     //
     // Fair-share and fixed-share use shares only, not machines
     //
-    HashMap<DuccId, Share> busyShares        = new HashMap<DuccId, Share>(); // Running "fair" share jobs
+    Map<DuccId, Share> busyShares        = new HashMap<DuccId, Share>(); // Running "fair" share jobs
 
     // incoming reports of machines that are now free
-    HashMap<DuccId, Pair<IRmJob, Share>> vacatedShares= new HashMap<DuccId, Pair<IRmJob, Share>>();
+    Map<DuccId, Pair<IRmJob, Share>> vacatedShares= new HashMap<DuccId, Pair<IRmJob, Share>>();
     // boolean growthOccurred = false;                                           // don't care which grew, just that something grew
 
-    ArrayList<IRmJob>        incomingJobs    = new ArrayList<IRmJob>();       // coming in from external world but not added our queues yet
-    ArrayList<IRmJob>        recoveredJobs   = new ArrayList<IRmJob>();       // coming in from external world but we don't now about them, (hopefully
-                                                                              //    because we crashed and not for more nefarious reasons)
-    ArrayList<IRmJob>        completedJobs   = new ArrayList<IRmJob>();       // signaled complete from outside but not yet dealt with
-    ArrayList<IRmJob>        initializedJobs = new ArrayList<IRmJob>();       // Init is complete so we can begin full (un)fair share allocation
+    List<IRmJob>        incomingJobs    = new ArrayList<IRmJob>();       // coming in from external world but not added to our queues yet
+    List<IRmJob>        recoveredJobs   = new ArrayList<IRmJob>();       // coming in from external world but we don't know about them (hopefully
+                                                                         //    because we crashed and not for more nefarious reasons)
+    List<IRmJob>        completedJobs   = new ArrayList<IRmJob>();       // signaled complete from outside but not yet dealt with
+    List<IRmJob>        initializedJobs = new ArrayList<IRmJob>();       // Init is complete so we can begin full (un)fair share allocation
 
     //HashMap<Node, Node> incomingNodes  = new HashMap<Node, Node>();         // node updates
-    HashMap<Node, Node> deadNodes      = new HashMap<Node, Node>();           // missed too many heartbeats
-    HashMap<Node, Node> allNodes       = new HashMap<Node, Node>();           // the guys we know
+    Map<Node, Node> deadNodes      = new HashMap<Node, Node>();           // missed too many heartbeats
+    // HashMap<Node, Node> allNodes       = new HashMap<Node, Node>();           // the guys we know
+    Map<String, NodePool>    nodepoolsByNode = new HashMap<String, NodePool>(); // all nodes, and their associated pool
 
-    HashMap<String, User>    users     = new HashMap<String, User>();         // Active users - has a job in the system
+    Map<String, User>    users     = new HashMap<String, User>();         // Active users - has a job in the system
     //HashMap<DuccId, IRmJob>    runningJobs = new HashMap<DuccId, IRmJob>();
 
-    HashMap<DuccId, IRmJob>  allJobs = new HashMap<DuccId, IRmJob>();
+    Map<DuccId, IRmJob>  allJobs = new HashMap<DuccId, IRmJob>();
 
-    HashMap<ResourceClass, ResourceClass> resourceClasses = new HashMap<ResourceClass, ResourceClass>();
-    HashMap<String, ResourceClass> resourceClassesByName = new HashMap<String, ResourceClass>();
+    Map<ResourceClass, ResourceClass> resourceClasses = new HashMap<ResourceClass, ResourceClass>();
+    Map<String, ResourceClass> resourceClassesByName = new HashMap<String, ResourceClass>();
 
     String defaultClassName = null;
     int defaultNThreads = 1;
@@ -96,7 +95,7 @@ public class Scheduler
 
     // these two are initialized in constructor
     String schedImplName;
-    IScheduler scheduler;
+    IScheduler[] schedulers;
 
     long share_quantum    = 16;             // 16 GB in KB - smallest share size
     long share_free_dram  = 0;              // 0  GB in KB  - minimum memory after shares are allocated
@@ -146,7 +145,7 @@ public class Scheduler
         String ep         = SystemPropertyResolver.getStringProperty("ducc.rm.eviction.policy", "SHRINK_BY_MACHINE");
         evictionPolicy    = EvictionPolicy.valueOf(ep);        
 
-        nodepool          = new NodePool(null, evictionPolicy, 0);   // global nodepool
+        // nodepool          = new NodePool(null, evictionPolicy, 0);   // global nodepool
         share_quantum     = SystemPropertyResolver.getLongProperty("ducc.rm.share.quantum", share_quantum) * 1024 * 1024;        // GB -> KB
         share_free_dram   = SystemPropertyResolver.getLongProperty("ducc.rm.reserved.dram", share_free_dram) * 1024 * 1024;   // GB -> KB
         ducc_home         = SystemPropertyResolver.getStringProperty("DUCC_HOME");
@@ -167,20 +166,24 @@ public class Scheduler
 
         idFactory = new DuccIdFactory(1);
 
-        try {
-            schedImplName = SystemPropertyResolver.getStringProperty("ducc.rm.scheduler", "org.apache.uima.ducc.rm.ClassBasedScheduler");
-            @SuppressWarnings("unchecked")
-			Class<IScheduler> cl = (Class<IScheduler>) Class.forName(schedImplName);
-            scheduler = (IScheduler) cl.newInstance();
-        } catch (ClassNotFoundException e) {
-            throw new SchedulingException(null, "Cannot find class " + schedImplName);
-        } catch (InstantiationException e) {
-            throw new SchedulingException(null, "Cannot instantiate class " + schedImplName);            
-        } catch (IllegalAccessException e) {
-            throw new SchedulingException(null, "Cannot instantiate class " + schedImplName + ": can't access constructor.");            
-        }
+//        try {
+//            schedImplName = SystemPropertyResolver.getStringProperty("ducc.rm.scheduler", "org.apache.uima.ducc.rm.ClassBasedScheduler");
+//            @SuppressWarnings("unchecked")
+//			Class<IScheduler> cl = (Class<IScheduler>) Class.forName(schedImplName);
+//            scheduler = (IScheduler) cl.newInstance();
+//        } catch (ClassNotFoundException e) {
+//            throw new SchedulingException(null, "Cannot find class " + schedImplName);
+//        } catch (InstantiationException e) {
+//            throw new SchedulingException(null, "Cannot instantiate class " + schedImplName);            
+//        } catch (IllegalAccessException e) {
+//            throw new SchedulingException(null, "Cannot instantiate class " + schedImplName + ": can't access constructor.");            
+//        }
 
-        String class_definitions = SystemPropertyResolver.getStringProperty("ducc.rm.class.definitions", "scheduler.classes");
+        String class_definitions = SystemPropertyResolver
+            .getStringProperty(DuccPropertiesResolver
+                               .ducc_rm_class_definitions, "scheduler.classes");
+
+        class_definitions = System.getProperty("DUCC_HOME") + "/resources/" + class_definitions;
         try {
             initClasses(class_definitions);
         } catch ( Exception e ) {
@@ -190,9 +193,9 @@ public class Scheduler
 
         // we share most of the state with the actual scheduling code - no need to keep passing this around
         // TODO: Make sure these are all Sialized correctly
-        scheduler.setEvictionPolicy(evictionPolicy);
-        scheduler.setClasses(resourceClasses);
-        scheduler.setNodePool(nodepool);
+//         scheduler.setEvictionPolicy(evictionPolicy);
+//         scheduler.setClasses(resourceClasses);
+//         scheduler.setNodePool(nodepools[0]);
 
         logger.info(methodName, null, "Scheduler running with share quantum           : ", (share_quantum / (1024*1024)), " GB");
         logger.info(methodName, null, "                       reserved DRAM           : ", (share_free_dram / (1024*1024)), " GB");
@@ -237,8 +240,14 @@ public class Scheduler
         return initialized;
     }
 
+    public Machine getMachine(Node n)
+    {
+        return getMachine(n.getNodeIdentity());
+    }
+
     public Machine getMachine(NodeIdentity ni)
     {
+        NodePool nodepool = getNodepoolByName(ni);
     	return nodepool.getMachine(ni);        
     }
 
@@ -307,142 +316,231 @@ public class Scheduler
         return share_order;
     }
 
+//     /**
+//      * Use the NodeIdentity to infer my the domain name.
+//      *
+//      * Itertate through the possible names - if one of them has a '.'
+//      * the we have to assume the following stuff is the domain name.
+//      * We only get one such name, so we give up the search if we find
+//      * it.
+//      */
+//     static String cached_domain = null;
+//     private String getDomainName()
+//     {
+//     	String methodName = "getDomainName";
+
+//         String answer = System.getProperty("SIM_RM_DOMAIN");       // for the simulation wrapper, to replay logs from other domains correctly
+//         if ( answer != null ) {
+//             return answer;
+//         }
+
+//         if ( cached_domain != null ) return cached_domain;
+//         try {
+// 			NodeIdentity ni   = new NodeIdentity();
+// 			for ( IIdentity id : ni.getNodeIdentities()) {
+// 			    String n = id.getName();
+// 			    int ndx = n.indexOf(".");
+// 			    if ( ndx > 0 ) {
+// 			        cached_domain =  n.substring(ndx + 1);
+//                     return cached_domain;
+// 			    }
+// 			}
+// 		} catch (Exception e) {
+// 			// TODO Auto-generated catch block
+// 			logger.warn(methodName, null, "Cannot create my own node identity:", e);
+// 		}
+//         return null;  // crappy config if this happens, some stuff may not match nodepools and
+//                       // nothing to do about it.
+//     }
+
+//     Map<String, String> readNodepoolFile(String npfile)
+//     {
+//         String methodName = "readNodepoolFile";
+//         String my_domain = getDomainName();
+//         String ducc_home = System.getProperty("DUCC_HOME");
+//         npfile = ducc_home + "/resources/" + npfile;
+
+//         logger.info(methodName, null, "Domain name:", my_domain);
+//         Map<String, String> response = new HashMap<String, String>();
+
+//         try {
+//             BufferedReader br = new BufferedReader(new FileReader(npfile));
+//             String node = "";
+//             while ( (node = br.readLine()) != null ) {
+//                 int ndx = node.indexOf("#");
+//                 if ( ndx >= 0 ) {
+//                     node = node.substring(0, ndx);
+//                 }
+//                 node = node.trim();
+//                 if (node.equals("") ) {
+//                     continue;
+//                 }
+
+//                 if ( node.startsWith("import") ) {
+//                     String[] tmp = node.split("\\s");
+//                     response.putAll(readNodepoolFile(tmp[1]));
+//                     continue;
+//                 }
+//                 response.put(node, node);
+
+//                 // include fully and non-fully qualified names to allow sloppiness of config
+//                 ndx = node.indexOf(".");
+//                 String dnode;
+//                 if ( ndx >= 0 ) {
+//                     dnode = node.substring(0, ndx);
+//                     response.put(dnode, dnode);
+//                 } else if ( my_domain != null ) {
+//                     dnode = node + "." + my_domain;
+//                     response.put(dnode, dnode);
+//                 }
+//             }
+//             br.close();                        
+            
+//         } catch (FileNotFoundException e) {
+//             throw new SchedulingException(null, "Cannot open NodePool file \"" + npfile + "\": file not found.");
+//         } catch (IOException e) {
+//             throw new SchedulingException(null, "Cannot read NodePool file \"" + npfile + "\": I/O Error.");
+//         }
+                
+//         return response;
+//     }
+
     /**
-     * Use the NodeIdentity to infer my the domain name.
-     *
-     * Itertate through the possible names - if one of them has a '.'
-     * the we have to assume the following stuff is the domain name.
-     * We only get one such name, so we give up the search if we find
-     * it.
+     * Collect all the classes served by the indicated nodepool (property set).  This fills
+     * in the 'ret' map from the parameter 'dp' and from recursive calls on the children of 'dp'.
+     *
+     * @param dp This is the properties object from the configurator for a top-level
+     *            nodepool.
+     * @param ret This is the map to be filled in by this routine.
      */
-    static String cached_domain = null;
-    private String getDomainName()
+    void getClassesForNodepool(DuccProperties dp, Map<ResourceClass, ResourceClass> ret)
     {
-    	String methodName = "getDomainName";
-
-        String answer = System.getProperty("SIM_RM_DOMAIN");       // for the simulation wrapper, to replay logs from other domains correctly
-        if ( answer != null ) {
-            return answer;
+        @SuppressWarnings("unchecked")
+		List<DuccProperties> class_set = (List<DuccProperties>) dp.get("classes");
+        if ( class_set != null ) {
+            for ( DuccProperties cl : class_set ) {
+                ResourceClass rc = resourceClassesByName.get(cl.getStringProperty("name"));
+                ret.put(rc, rc);
+            }
         }
 
-        if ( cached_domain != null ) return cached_domain;
-        try {
-			NodeIdentity ni   = new NodeIdentity();
-			for ( IIdentity id : ni.getNodeIdentities()) {
-			    String n = id.getName();
-			    int ndx = n.indexOf(".");
-			    if ( ndx > 0 ) {
-			        cached_domain =  n.substring(ndx + 1);
-                    return cached_domain;
-			    }
-			}
-		} catch (Exception e) {
-			// TODO Auto-generated catch block
-			logger.warn(methodName, null, "Cannot create my own node identity:", e);
-		}
-        return null;  // crappy config if this happens, some stuff may not match nodepools and
-                      // nothing to do about it.
+        @SuppressWarnings("unchecked")
+		List<DuccProperties> children = (List<DuccProperties>) dp.get("children");
+        if ( children != null ) {
+            for (DuccProperties child : children ) {
+                getClassesForNodepool(child, ret);
+            }
+        }        
     }
 
-    Map<String, String> readNodepoolFile(String npfile)
+    /**
+     * Map each node by name into the nodepool it belongs to
+     */
+    void mapNodesToNodepool(Map<String, String> nodes, NodePool pool)
     {
-        String methodName = "readNodepoolFile";
-        String my_domain = getDomainName();
-        String ducc_home = System.getProperty("DUCC_HOME");
-        npfile = ducc_home + "/resources/" + npfile;
+        if ( nodes == null ) return;
 
-        logger.info(methodName, null, "Domain name:", my_domain);
-        Map<String, String> response = new HashMap<String, String>();
+        for ( String s : nodes.keySet() ) {
+             nodepoolsByNode.put(s, pool);
+        }
+    }
 
-        try {
-            BufferedReader br = new BufferedReader(new FileReader(npfile));
-            String node = "";
-            while ( (node = br.readLine()) != null ) {
-                int ndx = node.indexOf("#");
-                if ( ndx >= 0 ) {
-                    node = node.substring(0, ndx);
-                }
-                node = node.trim();
-                if (node.equals("") ) {
-                    continue;
-                }
+    /**
+     * (Recursively) build up the hierarchy under the parent nodepool.
+     */
+    void createSubpools(NodePool parent, List<DuccProperties> children)
+    {
+        if ( children == null ) return;
 
-                if ( node.startsWith("import") ) {
-                    String[] tmp = node.split("\\s");
-                    response.putAll(readNodepoolFile(tmp[1]));
-                    continue;
-                }
-                response.put(node, node);
+        for ( DuccProperties dp : children ) {
+            String id = dp.getStringProperty("name");
+            @SuppressWarnings("unchecked")
+			Map<String, String> nodes = (Map<String, String>) dp.get("nodes");
+            NodePool child = parent.createSubpool(id, nodes, 0);
+            mapNodesToNodepool(nodes, child);
 
-                // include fully and non-fully qualified names to allow sloppiness of config
-                ndx = node.indexOf(".");
-                String dnode;
-                if ( ndx >= 0 ) {
-                    dnode = node.substring(0, ndx);
-                    response.put(dnode, dnode);
-                } else if ( my_domain != null ) {
-                    dnode = node + "." + my_domain;
-                    response.put(dnode, dnode);
-                }
-            }
-            br.close();                        
-            
-        } catch (FileNotFoundException e) {
-            throw new SchedulingException(null, "Cannot open NodePool file \"" + npfile + "\": file not found.");
-        } catch (IOException e) {
-            throw new SchedulingException(null, "Cannot read NodePool file \"" + npfile + "\": I/O Error.");
+            @SuppressWarnings("unchecked")
+			List<DuccProperties> grandkids = (List<DuccProperties>) dp.get("children");
+            createSubpools(child, grandkids);            
         }
-                
-        return response;
     }
 
     void initClasses(String filename)
-        throws Exception
     {
-        String methodName = "initClasses";
-        DuccProperties props = new DuccProperties();
-        props.load(ducc_home + "/resources/" + filename);
+    	String methodName = "initClasses";
+        String me = Scheduler.class.getName() + ".Config";
+        DuccLogger initLogger = new DuccLogger(me, COMPONENT_NAME);
+        NodeConfiguration nc = new NodeConfiguration(filename, initLogger);
+		try {
+			nc.readConfiguration();
+		} catch (Throwable e) {
+            logger.error(methodName, null, e);
+            logger.error(methodName, null, "Scheduler exits: unable to read configuration.");
+            System.exit(1);
+		}
 
-        defaultClassName = props.getProperty("scheduling.default.name");
+        nc.printConfiguration();
 
-        // read in nodepools
-        String npn = props.getProperty("scheduling.nodepool");
-        if ( npn != null ) {
-            String[] npnames = npn.split("\\s+");
-            for ( String nodepoolName : npnames ) {
-                int nporder = props.getIntProperty("scheduling.nodepool." + nodepoolName + ".order", 100);                
-                String npfile = props.getProperty("scheduling.nodepool." + nodepoolName).trim();
-                Map<String,String> npnodes = readNodepoolFile(npfile);                
-                nodepool.createSubpool(nodepoolName, npnodes, nporder);                    
-//                 } catch (FileNotFoundException e) {
-//                     throw new SchedulingException(null, "Cannot open NodePool file \"" + npfile + "\": file not found.");
-//                 } catch (IOException e) {
-//                     throw new SchedulingException(null, "Cannot read NodePool file \"" + npfile + "\": I/O Error.");
-//                 }
-            }
-        }
-        
-        // read in the class definitions
-        String cn = props.getProperty("scheduling.class_set");
-        if ( cn == null ) {
-            throw new SchedulingException(null, "No class definitions found, scheduler cannot start.");
-        }
-        
-        String[] classNames = cn.split("\\s+");
+        DuccProperties[] nps = nc.getToplevelNodepools();
+        Map<String, DuccProperties> cls = nc.getClasses();
+
+        nodepools = new NodePool[nps.length];                   // top-level nodepools
+        schedulers = new IScheduler[nps.length];                // a scheduler for each top-level nodepool
+
+        // Here build up the ResourceClass definitions
         logger.info(methodName, null, "Classes:");
         logger.info(methodName, null, ResourceClass.getHeader());
         logger.info(methodName, null, ResourceClass.getDashes());
-        for ( String n : classNames ) {
-        	n = n.trim();
-            ResourceClass rc = new ResourceClass(n); //, nodepool.getMachinesByName(), nodepool.getMachinesByIp());
-            rc.init(props);
+        for ( DuccProperties props : cls.values() ) {
+            ResourceClass rc = new ResourceClass(props);
             resourceClasses.put(rc, rc);
-            resourceClassesByName.put(n, rc);
+            resourceClassesByName.put(rc.getName(), rc);
             logger.info(methodName, null, rc.toString());
         }
+
+        // Instantiate one scheduler per top-level nodepool
+        try {
+            schedImplName = SystemPropertyResolver.getStringProperty("ducc.rm.scheduler", "org.apache.uima.ducc.rm.ClassBasedScheduler");
+            @SuppressWarnings("unchecked")
+			Class<IScheduler> cl = (Class<IScheduler>) Class.forName(schedImplName);
+            for ( int i = 0; i < nps.length; i++ ) {
+                schedulers[i] = (IScheduler) cl.newInstance();
+                schedulers[i].setEvictionPolicy(evictionPolicy);
+            }
+
+        } catch (ClassNotFoundException e) {
+            throw new SchedulingException(null, "Cannot find class " + schedImplName);
+        } catch (InstantiationException e) {
+            throw new SchedulingException(null, "Cannot instantiate class " + schedImplName);            
+        } catch (IllegalAccessException e) {
+            throw new SchedulingException(null, "Cannot instantiate class " + schedImplName + ": can't access constructor.");            
+        }
+
+        // Here create the nodepool configuration
+        for ( int i = 0; i < nps.length; i++ ) {
+            DuccProperties np = nps[i];
+            String id = np.getStringProperty("name");
+            @SuppressWarnings("unchecked")
+			Map<String, String> nodes = (Map<String, String>) np.get("nodes");
+            nodepools[i] = new NodePool(null, id, nodes, evictionPolicy, 0, 0);
+            schedulers[i].setNodePool(nodepools[i]);                    // set its top-level nodepool
+
+            mapNodesToNodepool(nodes, nodepools[i]);
+            logger.info(methodName, null, "Created top-level nodepool", id);
+
+            @SuppressWarnings("unchecked")
+			List<DuccProperties> children = (List<DuccProperties>) np.get("children");
+            createSubpools(nodepools[i], children);
+
+            Map<ResourceClass, ResourceClass> classesForNp = new HashMap<ResourceClass, ResourceClass>();
+            getClassesForNodepool(np, classesForNp);           // all classes served by this hierarchy - fills in classesForNp
+
+            schedulers[i].setClasses(classesForNp);
+        }
+
     }
 
-    void initClassesX(String filename)
+    void initClassesOld(String filename)
         throws Exception
     {
         String methodName = "initClasses";
@@ -450,52 +548,23 @@ public class Scheduler
         props.load(ducc_home + "/resources/" + filename);
 
         defaultClassName = props.getProperty("scheduling.default.name");
-        String my_domain = getDomainName();
 
         // read in nodepools
         String npn = props.getProperty("scheduling.nodepool");
         if ( npn != null ) {
-            String[] npnames = npn.split(" ");
+            String[] npnames = npn.split("\\s+");
             for ( String nodepoolName : npnames ) {
-                int nporder = props.getIntProperty("scheduling.nodepool." + nodepoolName + ".order", 100);                
-                String npfile = props.getProperty("scheduling.nodepool." + nodepoolName).trim();
-                try {
-                    String ducc_home = System.getProperty("DUCC_HOME");
-                    npfile = ducc_home + "/resources/" + npfile;
-                    BufferedReader br = new BufferedReader(new FileReader(npfile));
-                    String node = "";
-                    HashMap<String, String> npnodes = new HashMap<String, String>();
-                    while ( (node = br.readLine()) != null ) {
-                        int ndx = node.indexOf("#");
-                        if ( ndx >0 ) {
-                            node = node.substring(0, ndx);
-                        }
-                        node = node.trim();
-                        if (node.equals("") ) {
-                            continue;
-                        }
-
-                        npnodes.put(node, node);
-
-                        // include fully and non-fully qualified names to allow sloppiness of config
-                        ndx = node.indexOf(".");
-                        String dnode;
-                        if ( ndx >= 0 ) {
-                            dnode = node.substring(0, ndx);
-                            npnodes.put(dnode, dnode);
-                        } else if ( my_domain != null ) {
-                            dnode = node + "." + my_domain;
-                            npnodes.put(dnode, dnode);
-                        }
-                    }
-                    br.close();                        
-                    nodepool.createSubpool(nodepoolName, npnodes, nporder);
-                    
-                } catch (FileNotFoundException e) {
-                    throw new SchedulingException(null, "Cannot open NodePool file \"" + npfile + "\": file not found.");
-                } catch (IOException e) {
-                    throw new SchedulingException(null, "Cannot read NodePool file \"" + npfile + "\": I/O Error.");
-                }
+                @SuppressWarnings("unused")
+				int nporder = props.getIntProperty("scheduling.nodepool." + nodepoolName + ".order", 100);                
+                @SuppressWarnings("unused")
+				String npfile = props.getProperty("scheduling.nodepool." + nodepoolName).trim();
+                // jrc  Map<String,String> npnodes = readNodepoolFile(npfile);                
+                // jrc nodepool.createSubpool(nodepoolName, npnodes, nporder);                    
+//                 } catch (FileNotFoundException e) {
+//                     throw new SchedulingException(null, "Cannot open NodePool file \"" + npfile + "\": file not found.");
+//                 } catch (IOException e) {
+//                     throw new SchedulingException(null, "Cannot read NodePool file \"" + npfile + "\": I/O Error.");
+//                 }
             }
         }
         
@@ -505,22 +574,20 @@ public class Scheduler
             throw new SchedulingException(null, "No class definitions found, scheduler cannot start.");
         }
         
-        String[] classNames = cn.split("\\s+");
+//        String[] classNames = cn.split("\\s+");
         logger.info(methodName, null, "Classes:");
         logger.info(methodName, null, ResourceClass.getHeader());
         logger.info(methodName, null, ResourceClass.getDashes());
-        for ( String n : classNames ) {
-        	n = n.trim();
-            ResourceClass rc = new ResourceClass(n); //, nodepool.getMachinesByName(), nodepool.getMachinesByIp());
-            rc.init(props);
-            resourceClasses.put(rc, rc);
-            resourceClassesByName.put(n, rc);
-            logger.info(methodName, null, rc.toString());
-        }
+//        for ( String n : classNames ) {
+//        	n = n.trim();
+//            ResourceClass rc = new ResourceClass(n); //, nodepool.getMachinesByName(), nodepool.getMachinesByIp());
+//            rc.init(props);
+//            resourceClasses.put(rc, rc);
+//            resourceClassesByName.put(n, rc);
+//            logger.info(methodName, null, rc.toString());
+//        }
     }
 
-
-
     /**
      * Called only from schedule, under the 'this' monitor.
      *
@@ -639,7 +706,7 @@ public class Scheduler
             logger.trace(methodName, j.getId(), "<<<<<<<<<<");
         }
 
-        jmu.setAllJobs(allJobs);
+        jmu.setAllJobs((HashMap<DuccId, IRmJob>)allJobs);
 
         jobs = upd.getRefusedJobs();
         Iterator<IRmJob> iter = jobs.values().iterator();
@@ -682,7 +749,7 @@ public class Scheduler
         synchronized(this) {
 
             for ( Node n : nodeUpdates.values() ) {
-                Machine m = nodepool.getMachine(n);
+                Machine m = getMachine(n);
 
                 if ( m == null ) {
                     // must have been removed because of earlier missed hb
@@ -725,7 +792,7 @@ public class Scheduler
         logger.info("nodeArrives", null, "Total arrivals:", total_arrivals);
 
         handleDeadNodes();
-        nodepool.reset(NodePool.getMaxOrder());
+        resetNodepools();
 
         // TODO: Can we combine these two into one?
         SchedulingUpdate upd = new SchedulingUpdate();              // state from internal scheduler
@@ -875,7 +942,10 @@ public class Scheduler
             }
 
             logger.info(methodName, null, "Scheduling " + newJobs.size(), " new jobs.  Existing jobs: " + allJobs.size());
-            scheduler.schedule(upd);
+            for ( int i = 0; i < schedulers.length; i++ ) {
+                logger.info(methodName, null, "Run scheduler", i, "with top-level nodepool", nodepools[i].getId());
+                schedulers[i].schedule(upd);
+            }
 
             logger.info(methodName, null, "--------------- Scheduler returns ---------------");
             logger.info(methodName, null, "\n", upd.toString());
@@ -908,6 +978,23 @@ public class Scheduler
 //         }
 //     }
 
+    //
+    // Return a nodepool by node identity.  If the node can't be associated with a nodepool, return the
+    // default nodepool, which is always the first one defined in the config file.
+    //
+    NodePool getNodepoolByName(NodeIdentity ni)
+    {
+        NodePool np = nodepoolsByNode.get( ni.getName() );
+        if ( np == null ) {
+            np = nodepoolsByNode.get( ni.getIp() );
+        }
+        if ( np == null ) {
+            np = nodepools[0];
+            nodepoolsByNode.put( ni.getName(), np);          // assign this guy to the default np
+        }
+        return np;
+    }
+
     private int total_arrivals = 0;
     public void nodeArrives(Node node)
     {        
@@ -918,11 +1005,12 @@ public class Scheduler
         synchronized(this) {
             // the amount of memory available for shares, adjusted with configured overhead
 
-            Machine m = nodepool.getMachine(node);
+            NodePool np = getNodepoolByName(node.getNodeIdentity());
+            Machine m = np.getMachine(node);
             int share_order = 0;
 
             if ( m == null ) {
-                allNodes.put(node, node);
+                // allNodes.put(node, node);
                 long allocatable_mem =  node.getNodeMetrics().getNodeMemory().getMemTotal() - share_free_dram;
                 if ( dramOverride > 0 ) {
                     allocatable_mem = dramOverride;
@@ -932,15 +1020,8 @@ public class Scheduler
                 share_order = m.getShareOrder();
             }
             
-            m = nodepool.nodeArrives(node, share_order);                         // announce to the nodepools
-            // m.heartbeat_down();
-            // logger.info(methodName, null, "Node arrives:", m.getId());                                                              // make SURE it's reset ok
+            m = np.nodeArrives(node, share_order);                         // announce to the nodepools
         }
-
-        // The second block registers it in the heartbeat map
-//        synchronized(incomingNodes) {
-//            incomingNodes.put(node, node);
-//        }
     }
 
     public void nodeDeath(Map<Node, Node> nodes)
@@ -1087,7 +1168,9 @@ public class Scheduler
      */
     public void resetNodepools()
     {
-        nodepool.reset(NodePool.getMaxOrder());
+        for ( NodePool np : nodepools ) {
+            np.reset(NodePool.getMaxOrder());
+        }
     }
 
     /**
@@ -1162,7 +1245,9 @@ public class Scheduler
 
     public void queryMachines()
     {
-        nodepool.queryMachines();
+        for ( NodePool np : nodepools ) {
+            np.queryMachines();
+        }
     }
 
     class MachineByOrderSorter
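
The Scheduler changes above replace the single global NodePool and single IScheduler with one IScheduler per
top-level nodepool, and initClasses() now builds everything from the new NodeConfiguration parser instead of
the flat scheduling.* properties.  The sketch below drives that parser standalone, using only calls that appear
in this diff (the NodeConfiguration constructor, readConfiguration, printConfiguration, getToplevelNodepools,
getClasses, and the DuccProperties getters).  The file location, the "RM" component name passed to DuccLogger,
and the DumpSchedulerClasses driver class are illustrative assumptions, not part of the commit.

    // Sketch only: DumpSchedulerClasses is a hypothetical driver, not part of this commit.
    import java.util.Map;

    import org.apache.uima.ducc.common.NodeConfiguration;
    import org.apache.uima.ducc.common.utils.DuccLogger;
    import org.apache.uima.ducc.common.utils.DuccProperties;

    public class DumpSchedulerClasses
    {
        public static void main(String[] args)
        {
            // Scheduler.initClasses resolves the class file as DUCC_HOME/resources/<ducc.rm.class.definitions>,
            // defaulting to "scheduler.classes" as shown in the diff above.
            String classFile = System.getProperty("DUCC_HOME") + "/resources/scheduler.classes";

            // "RM" as the logger component name is an assumption for this sketch.
            DuccLogger logger = new DuccLogger(DumpSchedulerClasses.class.getName(), "RM");

            NodeConfiguration nc = new NodeConfiguration(classFile, logger);
            try {
                nc.readConfiguration();              // the Scheduler catches Throwable here and exits on failure
            } catch ( Throwable t ) {
                t.printStackTrace();
                return;
            }
            nc.printConfiguration();

            DuccProperties[] nodepools = nc.getToplevelNodepools();   // Scheduler builds one IScheduler per entry
            Map<String, DuccProperties> classes = nc.getClasses();    // class name -> its properties

            System.out.println("Top-level nodepools: " + nodepools.length);
            for ( DuccProperties cl : classes.values() ) {
                System.out.println(cl.getProperty("name")
                                   + "  policy="   + cl.getProperty("policy")
                                   + "  weight="   + cl.getStringProperty("weight", "-")
                                   + "  nodepool=" + cl.getStringProperty("nodepool", "-"));
            }
        }
    }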

Modified: uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandler.java
URL: http://svn.apache.org/viewvc/uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandler.java?rev=1521995&r1=1521994&r2=1521995&view=diff
==============================================================================
--- uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandler.java (original)
+++ uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandler.java Wed Sep 11 19:17:11 2013
@@ -27,11 +27,13 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.text.DecimalFormat;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;
 import java.util.Enumeration;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.TimeZone;
@@ -42,9 +44,9 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.uima.ducc.cli.ws.json.MachineFacts;
 import org.apache.uima.ducc.cli.ws.json.MachineFactsList;
+import org.apache.uima.ducc.common.NodeConfiguration;
 import org.apache.uima.ducc.common.authentication.AuthenticationManager;
 import org.apache.uima.ducc.common.authentication.IAuthenticationManager;
 import org.apache.uima.ducc.common.boot.DuccDaemonRuntimeProperties;
@@ -2204,8 +2206,11 @@ public class DuccHandler extends DuccAbs
 		duccLogger.trace(methodName, null, messages.fetch("exit"));
 	}
 	
+    /**
+     * @deprecated
+     */
 	private void handleServletJsonSystemClassesData(String target,Request baseRequest,HttpServletRequest request,HttpServletResponse response) 
-	throws IOException, ServletException
+	throws Exception
 	{
 		String methodName = "handleJsonServletSystemClassesData";
 		duccLogger.trace(methodName, null, messages.fetch("enter"));
@@ -2214,14 +2219,15 @@ public class DuccHandler extends DuccAbs
 		sb.append("\"aaData\": [ ");
 		
 		DuccSchedulerClasses schedulerClasses = new DuccSchedulerClasses();
-		DuccProperties properties = schedulerClasses.getClasses();
-		String class_set = properties.getProperty("scheduling.class_set");
-		class_set.trim();
+        Map<String, DuccProperties> clmap = schedulerClasses.getClasses();
+
 		boolean first = true;
-		if(class_set != null) {
-			String[] class_array = StringUtils.split(class_set);
-			for(int i=0; i<class_array.length; i++) {
-				String class_name = class_array[i].trim();
+		if( clmap != null ) {
+            DuccProperties[] class_set = clmap.values().toArray(new DuccProperties[clmap.size()]);
+            Arrays.sort(class_set, new NodeConfiguration.ClassSorter());
+
+			for( DuccProperties cl : class_set ) {
+				String class_name = cl.getProperty("name");
 				if(first) {
 					first = false;
 				}
@@ -2232,15 +2238,15 @@ public class DuccHandler extends DuccAbs
 				sb.append(quote(class_name));
 				sb.append(",");
 
-                String policy = properties.getStringProperty("scheduling.class."+class_name+".policy");
+                String policy = cl.getProperty("policy");
 				sb.append(quote(policy));
 				sb.append(",");
-				sb.append(quote(properties.getStringProperty("scheduling.class."+class_name+".share_weight", "100")));
+				sb.append(quote(cl.getStringProperty("weight", "-")));
 				sb.append(",");
-				sb.append(quote(properties.getStringProperty("scheduling.class."+class_name+".priority")));
+				sb.append(quote(cl.getProperty("priority")));
 				// cap is either absolute or proportional.  If proportional, it ends with '%'.  It's always
                 // either-or so at least one of these columns will have N/A
-				String val = properties.getStringProperty("scheduling.class."+class_name+".cap", "0");
+				String val = cl.getStringProperty("cap", "0");
 				if( (val == null) || val.equals("0") ) {
                     sb.append(",");
     				sb.append(quote("-"));
@@ -2257,38 +2263,43 @@ public class DuccHandler extends DuccAbs
     				sb.append(",");
     				sb.append(quote(val));
                 }
-				val = properties.getStringProperty("scheduling.class."+class_name+".initialization.cap", System.getProperty("ducc.rm.initialization.cap"));
-				if ( val == null ) {
-                    val = "2";
+
+                if ( policy.equals("FAIR_SHARE") ) {
+                    val = cl.getStringProperty("initialization-cap", System.getProperty("ducc.rm.initialization.cap"));
+                    if ( val == null ) {
+                        val = "2";
+                    }
+                    sb.append(",");
+                    sb.append(quote(val));
+                    boolean bval = cl.getBooleanProperty("expand-by-doubling", true);
+                    Boolean b = new Boolean(bval);
+                    sb.append(",");
+                    sb.append(quote(b.toString()));
+                    val = cl.getStringProperty("use-prediction", System.getProperty("ducc.rm.prediction"));
+                    if ( val == null ) {
+                        val = "true";
+                    }
+                    sb.append(",");
+                    sb.append(quote(val));
+                    val = cl.getStringProperty("prediction-fudge", System.getProperty("ducc.rm.prediction.fudge"));
+                    if ( val == null ) {
+                        val = "10000";
+                    }
+                    sb.append(",");
+                    sb.append(quote(val));
+                } else {
+                    sb.append(",-,-,-,-"); 
                 }
-				sb.append(",");
-				sb.append(quote(val));
-				boolean bval = properties.getBooleanProperty("scheduling.class."+class_name+".expand.by.doubling", true);
-				Boolean b = new Boolean(bval);
-				sb.append(",");
-				sb.append(quote(b.toString()));
-				val = properties.getStringProperty("scheduling.class."+class_name+".prediction", System.getProperty("ducc.rm.prediction"));
-				if ( val == null ) {
-					val = "true";
-				}
-				sb.append(",");
-				sb.append(quote(val));
-				val = properties.getStringProperty("scheduling.class."+class_name+".prediction.fudge", System.getProperty("ducc.rm.prediction.fudge"));
-				if ( val == null ) {
-					val = "10000";
-				}
-				sb.append(",");
-				sb.append(quote(val));
 
                 // max for reserve is in machines.  For fixed it is in processes.  No max on fair-share. So slightly
                 // ugly code here.
                 if ( policy.equals("RESERVE") ) {
-                    val = properties.getStringProperty("scheduling.class."+class_name+".max_machines", "0");
+                    val = cl.getProperty("max-machines");
                     if( val == null || val.equals("0")) {
                         val = "-";
                     }
                 } else if ( policy.equals("FIXED_SHARE") ) {
-                    val = properties.getStringProperty("scheduling.class."+class_name+".max_processes", "0");
+                    val = cl.getStringProperty("max-processes");
                     if( val == null || val.equals("0")) {
                         val = "-";
                     }
@@ -2298,7 +2309,7 @@ public class DuccHandler extends DuccAbs
 
 				sb.append(",");
 				sb.append(quote(val));
-				val = properties.getStringProperty("scheduling.class."+class_name+".nodepool", "--global--");
+				val = cl.getStringProperty("nodepool");
 				sb.append(",");
 				sb.append(quote(val));
 				sb.append("]");
@@ -2569,7 +2580,7 @@ public class DuccHandler extends DuccAbs
 	}	
 	
 	private void handleDuccServletReservationSchedulingClasses(String target,Request baseRequest,HttpServletRequest request,HttpServletResponse response) 
-	throws IOException, ServletException
+	throws Exception
 	{
 		String methodName = "handleDuccServletReservationSchedulingCLasses";
 		duccLogger.trace(methodName, null, messages.fetch("enter"));
@@ -3449,7 +3460,7 @@ public class DuccHandler extends DuccAbs
 	}		
 	
 	private void handleDuccRequest(String target,Request baseRequest,HttpServletRequest request,HttpServletResponse response) 
-	throws IOException, ServletException
+	throws Exception
 	{
 		String methodName = "handleDuccRequest";
 		duccLogger.trace(methodName, null, messages.fetch("enter"));

Modified: uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandlerJsonFormat.java
URL: http://svn.apache.org/viewvc/uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandlerJsonFormat.java?rev=1521995&r1=1521994&r2=1521995&view=diff
==============================================================================
--- uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandlerJsonFormat.java (original)
+++ uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandlerJsonFormat.java Wed Sep 11 19:17:11 2013
@@ -22,9 +22,11 @@ import java.io.File;
 import java.io.IOException;
 import java.text.DecimalFormat;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.ListIterator;
+import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.TreeMap;
@@ -34,13 +36,13 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.uima.ducc.cli.ws.json.MachineFacts;
 import org.apache.uima.ducc.cli.ws.json.MachineFactsList;
 import org.apache.uima.ducc.cli.ws.json.NodePidList;
 import org.apache.uima.ducc.cli.ws.json.ReservationFacts;
 import org.apache.uima.ducc.cli.ws.json.ReservationFactsList;
 import org.apache.uima.ducc.common.IDuccEnv;
+import org.apache.uima.ducc.common.NodeConfiguration;
 import org.apache.uima.ducc.common.boot.DuccDaemonRuntimeProperties;
 import org.apache.uima.ducc.common.boot.DuccDaemonRuntimeProperties.DaemonName;
 import org.apache.uima.ducc.common.internationalization.Messages;
@@ -1355,7 +1357,7 @@ public class DuccHandlerJsonFormat exten
 	}		
 	
 	private void handleServletJsonFormatClassesAaData(String target,Request baseRequest,HttpServletRequest request,HttpServletResponse response) 
-	throws IOException, ServletException
+	throws Exception
 	{
 		String methodName = "handleServletJsonFormatClassesAaData";
 		duccLogger.trace(methodName, jobid, messages.fetch("enter"));
@@ -1363,33 +1365,34 @@ public class DuccHandlerJsonFormat exten
 		JsonObject jsonResponse = new JsonObject();
 		JsonArray data = new JsonArray();
 		JsonArray row;
-		
+
 		DuccSchedulerClasses schedulerClasses = new DuccSchedulerClasses();
-		DuccProperties properties = schedulerClasses.getClasses();
-		String class_set = properties.getProperty("scheduling.class_set");
-		class_set.trim();
-		if(class_set != null) {
-			String[] class_array = StringUtils.split(class_set);
-			for(int i=0; i<class_array.length; i++) {
+        Map<String, DuccProperties> clmap = schedulerClasses.getClasses();
+		        
+		if( clmap != null ) {
+            DuccProperties[] class_set = clmap.values().toArray(new DuccProperties[clmap.size()]);
+            Arrays.sort(class_set, new NodeConfiguration.ClassSorter());            
+
+			for( DuccProperties cl : class_set ) {
 				row = new JsonArray();
-				String class_name = class_array[i].trim();
+				String class_name = cl.getProperty("name");
 				// Name
 				row.add(new JsonPrimitive(class_name));
 				// Policy
-                String policy = properties.getStringProperty("scheduling.class."+class_name+".policy");
+                String policy = cl.getProperty("policy");
                 row.add(new JsonPrimitive(policy));
                 // Weight
-                String weight = properties.getStringProperty("scheduling.class."+class_name+".share_weight", "100");
+                String weight = cl.getStringProperty("weight", "-");
                 row.add(new JsonPrimitive(weight));
                 // Priority
-                String priority = properties.getStringProperty("scheduling.class."+class_name+".priority");
+                String priority = cl.getProperty("priority");
                 row.add(new JsonPrimitive(priority));
                 
                 // cap is either absolute or proportional.  If proportional, it ends with '%'.  It's always
                 // either-or so at least one of these columns will have N/A
                 
                 // Relative & Absolute Caps
-				String val = properties.getStringProperty("scheduling.class."+class_name+".cap", "0");
+				String val = cl.getStringProperty("cap", "0");
 				if( (val == null) || val.equals("0") ) {
 					row.add(new JsonPrimitive("-"));
 					row.add(new JsonPrimitive("-"));
@@ -1403,27 +1406,34 @@ public class DuccHandlerJsonFormat exten
 					row.add(new JsonPrimitive(val));
                 }
 
-				// Initialization Cap
-				String defaultInitializationCap = "2";
-				val = properties.getStringProperty("scheduling.class."+class_name+".initialization.cap", 
-                                                   System.getProperty("ducc.rm.initialization.cap",defaultInitializationCap));
-				row.add(new JsonPrimitive(val));
-
-				// Expand-by-Doubling
-				boolean bval = properties.getBooleanProperty("scheduling.class."+class_name+".expand.by.doubling", true);
-				row.add(new JsonPrimitive(bval));
-
-				// Use Prediction
-				String defaultUsePrediction = "true";
-				val = properties.getStringProperty("scheduling.class."+class_name+".prediction", 
-                                                   System.getProperty("ducc.rm.prediction",defaultUsePrediction));
-				row.add(new JsonPrimitive(val));
-				
-				// Prediction Fudge
-				String defaultPredictionFudge = "10000";
-				val = properties.getStringProperty("scheduling.class."+class_name+".prediction.fudge",
-                                                   System.getProperty("ducc.rm.prediction.fudge",defaultPredictionFudge));
-				row.add(new JsonPrimitive(val));
+                if ( policy.equals("FAIR_SHARE") ) {
+                    // Initialization Cap
+                    String defaultInitializationCap = "2";
+                    val = cl.getStringProperty("initialization-cap",
+                                               System.getProperty("ducc.rm.initialization.cap",defaultInitializationCap));
+                    row.add(new JsonPrimitive(val));
+                    
+                    // Expand-by-Doubling
+                    boolean bval = cl.getBooleanProperty("expand-by-doubling", true);
+                    row.add(new JsonPrimitive(bval));
+                    
+                    // Use Prediction
+                    String defaultUsePrediction = "true";
+                    val = cl.getProperty("use-prediction",
+                                         System.getProperty("ducc.rm.prediction",defaultUsePrediction));
+                    row.add(new JsonPrimitive(val));
+                    
+                    // Prediction Fudge
+                    String defaultPredictionFudge = "10000";
+                    val = cl.getStringProperty("prediction-fudge",
+                                               System.getProperty("ducc.rm.prediction.fudge",defaultPredictionFudge));
+                    row.add(new JsonPrimitive(val));
+                } else {
+                    row.add(new JsonPrimitive("-"));
+                    row.add(new JsonPrimitive("-"));
+                    row.add(new JsonPrimitive("-"));
+                    row.add(new JsonPrimitive("-"));
+                }
 
                 // max for reserve is in machines.  For fixed it is in processes.  No max on fair-share. So slightly
                 // ugly code here.
@@ -1431,12 +1441,12 @@ public class DuccHandlerJsonFormat exten
 				// Max Allocation 
 				
                 if ( policy.equals("RESERVE") ) {
-                    val = properties.getStringProperty("scheduling.class."+class_name+".max_machines", "0");
+                    val = cl.getStringProperty("max-machines");
                     if( val == null || val.equals("0")) {
                         val = "-";
                     }
                 } else if ( policy.equals("FIXED_SHARE") ) {
-                    val = properties.getStringProperty("scheduling.class."+class_name+".max_processes", "0");
+                    val = cl.getStringProperty("max-processes");
                     if( val == null || val.equals("0")) {
                         val = "-";
                     }
@@ -1444,30 +1454,24 @@ public class DuccHandlerJsonFormat exten
 					val = "-";
                 }
 
-				val = properties.getStringProperty("scheduling.class."+class_name+".max_shares", "0");
+				val = cl.getStringProperty("max-shares", "0");
 				if( val == null || val.equals("0")) {
 					val = "-";
 				}
 				row.add(new JsonPrimitive(val));
 
 				// Nodepool
-				val = properties.getStringProperty("scheduling.class."+class_name+".nodepool", "--global--");
+				val = cl.getProperty("nodepool");
 				row.add(new JsonPrimitive(val));
 				
 				// Debug
 				val = "-";
 				if(schedulerClasses.isPreemptable(class_name)) {
 					if(schedulerClasses.isPreemptable(class_name)) {
-						String v1 = properties.getStringProperty("scheduling.class."+class_name+".debug", "");
+						String v1 = cl.getStringProperty("debug", "");
 						if(!v1.equals("")) {
 							val = v1;
 						}
-						else {
-							String v2 = properties.getStringProperty("scheduling.default.name.debug", "");
-							if(!v2.equals("")) {
-								val = "["+v2+"]";
-							}
-						}
 					}
 				}
 				row.add(new JsonPrimitive(val));
@@ -1804,7 +1808,7 @@ public class DuccHandlerJsonFormat exten
 	}
 	
 	private void handleDuccRequest(String target,Request baseRequest,HttpServletRequest request,HttpServletResponse response) 
-	throws IOException, ServletException
+	throws Exception
 	{
 		String methodName = "handleDuccRequest";
 		duccLogger.trace(methodName, jobid, messages.fetch("enter"));
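
The web handlers touched by this commit (DuccHandler and DuccHandlerJsonFormat above, DuccHandlerLegacy below)
now read the class definitions as a Map<String, DuccProperties> from DuccSchedulerClasses.getClasses() and
order them with NodeConfiguration.ClassSorter, instead of splitting the old scheduling.class_set string.  A
minimal sketch of that shared pattern follows; the ListSchedulerClasses driver and the printed columns are
assumptions for illustration, and DuccSchedulerClasses is used unqualified because its import does not appear
in these hunks.

    // Sketch only: ListSchedulerClasses is a hypothetical driver, not part of this commit.
    import java.util.Arrays;
    import java.util.Map;

    import org.apache.uima.ducc.common.NodeConfiguration;
    import org.apache.uima.ducc.common.utils.DuccProperties;

    public class ListSchedulerClasses
    {
        public static void main(String[] args)
            throws Exception
        {
            // DuccSchedulerClasses is assumed resolvable from the web-server package, as in the handlers.
            DuccSchedulerClasses schedulerClasses = new DuccSchedulerClasses();
            Map<String, DuccProperties> clmap = schedulerClasses.getClasses();
            if ( clmap == null ) {
                return;
            }

            // Same ordering the handlers apply before rendering their table rows.
            DuccProperties[] class_set = clmap.values().toArray(new DuccProperties[clmap.size()]);
            Arrays.sort(class_set, new NodeConfiguration.ClassSorter());

            for ( DuccProperties cl : class_set ) {
                System.out.println(cl.getProperty("name")
                                   + "\t" + cl.getProperty("policy")
                                   + "\t" + cl.getStringProperty("weight", "-"));
            }
        }
    }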

Modified: uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandlerLegacy.java
URL: http://svn.apache.org/viewvc/uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandlerLegacy.java?rev=1521995&r1=1521994&r2=1521995&view=diff
==============================================================================
--- uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandlerLegacy.java (original)
+++ uima/sandbox/uima-ducc/trunk/uima-ducc-web/src/main/java/org/apache/uima/ducc/ws/server/DuccHandlerLegacy.java Wed Sep 11 19:17:11 2013
@@ -22,9 +22,11 @@ import java.io.File;
 import java.io.IOException;
 import java.text.DecimalFormat;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.ListIterator;
+import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.TreeMap;
@@ -34,10 +36,10 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.uima.ducc.cli.ws.json.MachineFacts;
 import org.apache.uima.ducc.cli.ws.json.MachineFactsList;
 import org.apache.uima.ducc.common.IDuccEnv;
+import org.apache.uima.ducc.common.NodeConfiguration;
 import org.apache.uima.ducc.common.boot.DuccDaemonRuntimeProperties;
 import org.apache.uima.ducc.common.boot.DuccDaemonRuntimeProperties.DaemonName;
 import org.apache.uima.ducc.common.internationalization.Messages;
@@ -1031,40 +1033,42 @@ public class DuccHandlerLegacy extends D
 	}
 	
 	private void handleServletLegacySystemClasses(String target,Request baseRequest,HttpServletRequest request,HttpServletResponse response) 
-	throws IOException, ServletException
+	throws Exception
 	{
 		String methodName = "handleServletLegacySystemClasses";
 		duccLogger.trace(methodName, jobid, messages.fetch("enter"));
 		StringBuffer sb = new StringBuffer();
-		
+
+        
 		DuccSchedulerClasses schedulerClasses = new DuccSchedulerClasses();
-		DuccProperties properties = schedulerClasses.getClasses();
-		String class_set = properties.getProperty("scheduling.class_set");
-		class_set.trim();
-		if(class_set != null) {
-			String[] class_array = StringUtils.split(class_set);
-			for(int i=0; i<class_array.length; i++) {
-				String class_name = class_array[i].trim();
+        Map<String, DuccProperties> clmap = schedulerClasses.getClasses();
+		if ( clmap != null ) {
+            DuccProperties[] class_set = clmap.values().toArray(new DuccProperties[clmap.size()]);
+            Arrays.sort(class_set, new NodeConfiguration.ClassSorter());
+            int i = 0;
+
+            for ( DuccProperties cl : class_set) {
+				String class_name = cl.getProperty("name");
 				sb.append(trGet(i+1));
 				sb.append("<td>");
 				sb.append(class_name);
 				sb.append("</td>");	
 				sb.append("<td>");
 
-                String policy = properties.getStringProperty("scheduling.class."+class_name+".policy");
+                String policy = cl.getProperty("policy");
 				sb.append(policy);
 				sb.append("</td>");	
 				sb.append("<td align=\"right\">");
-				sb.append(properties.getStringProperty("scheduling.class."+class_name+".share_weight", "100"));
+				sb.append(cl.getStringProperty("weight", "-"));
 				sb.append("</td>");	
 				sb.append("<td align=\"right\">");
-				sb.append(properties.getStringProperty("scheduling.class."+class_name+".priority"));
+				sb.append(cl.getProperty("priority"));
 				sb.append("</td>");	
 
                 // cap is either absolute or proportional.  If proportional, it ends with '%'.  It's always
                 // either-or so at least one of these columns will have N/A
-				String val = properties.getStringProperty("scheduling.class."+class_name+".cap", "0");
-				if( (val == null) || val.equals("0") ) {
+				String val = cl.getProperty("cap");
+				if( (val == null) || val.equals("0") || (Integer.parseInt(val) == Integer.MAX_VALUE) ) {
                     sb.append("<td align=\"right\">");
                     sb.append("-");
                     sb.append("</td>");
@@ -1089,49 +1093,56 @@ public class DuccHandlerLegacy extends D
                     sb.append("</td>");
                 }
 
-				sb.append("<td align=\"right\">");
-				val = properties.getStringProperty("scheduling.class."+class_name+".initialization.cap", 
-                                                   System.getProperty("ducc.rm.initialization.cap"));
-                if ( val == null ) {
-                    val = "2";
-                }
-
-				sb.append(val);
-				sb.append("</td>");	
-
-				sb.append("<td align=\"right\">");
-				boolean bval = properties.getBooleanProperty("scheduling.class."+class_name+".expand.by.doubling", true);
-                sb.append(bval);
-				sb.append("</td>");	
-
-				sb.append("<td align=\"right\">");
-				val = properties.getStringProperty("scheduling.class."+class_name+".prediction", 
-                                                   System.getProperty("ducc.rm.prediction"));
-                if ( val == null ) {
-                    val = "true";
-                }
-                sb.append(val);
-				sb.append("</td>");	
+                if ( policy.equals("FAIR_SHARE") ) {
+                    sb.append("<td align=\"right\">");
+                    val = cl.getStringProperty("initialization-cap",
+                                               System.getProperty("ducc.rm.initialization.cap"));
+                    if ( val == null ) {
+                        val = "2";
+                    }
+                    
+                    sb.append(val);
+                    sb.append("</td>");	
+                    
+                    sb.append("<td align=\"right\">");
+                    String bval = cl.getStringProperty("expand-by-doubling", "-");
+                    sb.append(bval);
+                    sb.append("</td>");	
 
-				sb.append("<td align=\"right\">");
-				val = properties.getStringProperty("scheduling.class."+class_name+".prediction.fudge",
-                                                   System.getProperty("ducc.rm.prediction.fudge"));
-                if ( val == null ) {
-                    val = "10000";
+                    sb.append("<td align=\"right\">");
+                    val = cl.getStringProperty("use-prediction",
+                                               System.getProperty("ducc.rm.prediction"));
+                    if ( val == null ) {
+                        val = "-";
+                    }
+                    sb.append(val);
+                    sb.append("</td>");	
+                    
+                    sb.append("<td align=\"right\">");
+                    val = cl.getStringProperty("prediction-fudge",
+                                               System.getProperty("ducc.rm.prediction.fudge"));
+                    if ( val == null ) {
+                        val = "-"; 
+                    }
+                    sb.append(val);
+                    sb.append("</td>");	
+                } else {
+                    sb.append("<td align=\"right\">-</td>");          // not applicable for non-fair-share
+                    sb.append("<td align=\"right\">-</td>");
+                    sb.append("<td align=\"right\">-</td>");
+                    sb.append("<td align=\"right\">-</td>");
                 }
-                sb.append(val);
-				sb.append("</td>");	
 
                // max for RESERVE is in machines; for FIXED_SHARE it is in processes.  No max on fair-share,
                // so the code here is slightly ugly.
  				sb.append("<td align=\"right\">");
                 if ( policy.equals("RESERVE") ) {
-                    val = properties.getStringProperty("scheduling.class."+class_name+".max_machines", "0");
+                    val = cl.getProperty("max-machines");
                     if( val == null || val.equals("0")) {
                         val = "-";
                     }
                 } else if ( policy.equals("FIXED_SHARE") ) {
-                    val = properties.getStringProperty("scheduling.class."+class_name+".max_processes", "0");
+                    val = cl.getProperty("max-processes");
                     if( val == null || val.equals("0")) {
                         val = "-";
                     }
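
As the comments above note, a class cap is either an absolute share count or a percentage ending in '%' (so one of the two cap columns always shows "-"), and the max column is measured in machines for RESERVE, in processes for FIXED_SHARE, and has no bound for fair-share. A small illustrative helper under those conventions only; the class and method names here are hypothetical and not part of the handler.

    public class ClassLimitsSketch {

        // Split a cap into (relative, absolute) table cells; exactly one applies.
        static String[] capColumns(String cap) {
            if (cap == null || cap.equals("0")) {
                return new String[] { "-", "-" };
            } else if (cap.endsWith("%")) {
                return new String[] { cap, "-" };
            } else {
                return new String[] { "-", cap };
            }
        }

        // RESERVE is limited in machines, FIXED_SHARE in processes,
        // FAIR_SHARE has no maximum at all.
        static String maxColumn(String policy, String maxMachines, String maxProcesses) {
            String val;
            if ("RESERVE".equals(policy)) {
                val = maxMachines;
            } else if ("FIXED_SHARE".equals(policy)) {
                val = maxProcesses;
            } else {
                val = null;
            }
            return (val == null || val.equals("0")) ? "-" : val;
        }

        public static void main(String[] args) {
            String[] cap = capColumns("25%");
            System.out.println("cap: relative=" + cap[0] + " absolute=" + cap[1]);        // relative=25% absolute=-
            System.out.println("RESERVE max:    " + maxColumn("RESERVE", "4", null));     // 4
            System.out.println("FAIR_SHARE max: " + maxColumn("FAIR_SHARE", null, null)); // -
        }
    }
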
@@ -1139,7 +1150,7 @@ public class DuccHandlerLegacy extends D
 					val = "-";
                 }
 
-				val = properties.getStringProperty("scheduling.class."+class_name+".max_shares", "0");
+				val = cl.getProperty("max-shares");
 				if( val == null || val.equals("0")) {
 					val = "-";
 				}
@@ -1147,7 +1158,7 @@ public class DuccHandlerLegacy extends D
 				sb.append("</td>");	
 
 				sb.append("<td align=\"right\">");
-				val = properties.getStringProperty("scheduling.class."+class_name+".nodepool", "--global--");
+				val = cl.getProperty("nodepool");
                 sb.append(val);
 				sb.append("</td>");	
 				
@@ -1155,16 +1166,10 @@ public class DuccHandlerLegacy extends D
 				sb.append("<td align=\"right\">");
 				val = "-";
 				if(schedulerClasses.isPreemptable(class_name)) {
-					String v1 = properties.getStringProperty("scheduling.class."+class_name+".debug", "");
+					String v1 = cl.getStringProperty("debug", "");
 					if(!v1.equals("")) {
 						val = v1;
-					}
-					else {
-						String v2 = properties.getStringProperty("scheduling.default.name.debug", "");
-						if(!v2.equals("")) {
-							val = "["+v2+"]";
-						}
-					}
+					} 
 				}
 				sb.append(val);
 				sb.append("</td>");	
@@ -1178,6 +1183,154 @@ public class DuccHandlerLegacy extends D
 		duccLogger.trace(methodName, jobid, messages.fetch("exit"));
 	}		
 
+	// private void handleServletLegacySystemClassesX(String target,Request baseRequest,HttpServletRequest request,HttpServletResponse response) 
+	// throws IOException, ServletException
+	// {
+	// 	String methodName = "handleServletLegacySystemClasses";
+	// 	duccLogger.trace(methodName, jobid, messages.fetch("enter"));
+	// 	StringBuffer sb = new StringBuffer();
+		
+	// 	DuccSchedulerClasses schedulerClasses = new DuccSchedulerClasses();
+	// 	DuccProperties properties = schedulerClasses.getClasses();
+	// 	String class_set = properties.getProperty("scheduling.class_set");
+	// 	class_set.trim();
+	// 	if(class_set != null) {
+	// 		String[] class_array = StringUtils.split(class_set);
+	// 		for(int i=0; i<class_array.length; i++) {
+	// 			String class_name = class_array[i].trim();
+	// 			sb.append(trGet(i+1));
+	// 			sb.append("<td>");
+	// 			sb.append(class_name);
+	// 			sb.append("</td>");	
+	// 			sb.append("<td>");
+
+    //             String policy = properties.getStringProperty("scheduling.class."+class_name+".policy");
+	// 			sb.append(policy);
+	// 			sb.append("</td>");	
+	// 			sb.append("<td align=\"right\">");
+	// 			sb.append(properties.getStringProperty("scheduling.class."+class_name+".share_weight", "100"));
+	// 			sb.append("</td>");	
+	// 			sb.append("<td align=\"right\">");
+	// 			sb.append(properties.getStringProperty("scheduling.class."+class_name+".priority"));
+	// 			sb.append("</td>");	
+
+    //             // cap is either absolute or proportional; if proportional, it ends with '%'.  It's always
+    //             // either-or, so at least one of these two columns shows "-".
+	// 			String val = properties.getStringProperty("scheduling.class."+class_name+".cap", "0");
+	// 			if( (val == null) || val.equals("0") ) {
+    //                 sb.append("<td align=\"right\">");
+    //                 sb.append("-");
+    //                 sb.append("</td>");
+    //                 sb.append("<td align=\"right\">");
+    //                 sb.append("-");
+    //                 sb.append("</td>");
+	// 			} else if ( val.endsWith("%") ) {
+    //                 sb.append("<td align=\"right\">");
+    //                 sb.append(val);
+    //                 sb.append("</td>");
+
+    //                 sb.append("<td align=\"right\">");
+    //                 sb.append("-");
+    //                 sb.append("</td>");
+    //             } else {
+    //                 sb.append("<td align=\"right\">");
+    //                 sb.append("-");
+    //                 sb.append("</td>");
+
+    //                 sb.append("<td align=\"right\">");
+    //                 sb.append(val);
+    //                 sb.append("</td>");
+    //             }
+
+	// 			sb.append("<td align=\"right\">");
+	// 			val = properties.getStringProperty("scheduling.class."+class_name+".initialization.cap", 
+    //                                                System.getProperty("ducc.rm.initialization.cap"));
+    //             if ( val == null ) {
+    //                 val = "2";
+    //             }
+
+	// 			sb.append(val);
+	// 			sb.append("</td>");	
+
+	// 			sb.append("<td align=\"right\">");
+	// 			boolean bval = properties.getBooleanProperty("scheduling.class."+class_name+".expand.by.doubling", true);
+    //             sb.append(bval);
+	// 			sb.append("</td>");	
+
+	// 			sb.append("<td align=\"right\">");
+	// 			val = properties.getStringProperty("scheduling.class."+class_name+".prediction", 
+    //                                                System.getProperty("ducc.rm.prediction"));
+    //             if ( val == null ) {
+    //                 val = "true";
+    //             }
+    //             sb.append(val);
+	// 			sb.append("</td>");	
+
+	// 			sb.append("<td align=\"right\">");
+	// 			val = properties.getStringProperty("scheduling.class."+class_name+".prediction.fudge",
+    //                                                System.getProperty("ducc.rm.prediction.fudge"));
+    //             if ( val == null ) {
+    //                 val = "10000";
+    //             }
+    //             sb.append(val);
+	// 			sb.append("</td>");	
+
+    //             // max for RESERVE is in machines; for FIXED_SHARE it is in processes.  No max on fair-share,
+    //             // so the code here is slightly ugly.
+ 	// 			sb.append("<td align=\"right\">");
+    //             if ( policy.equals("RESERVE") ) {
+    //                 val = properties.getStringProperty("scheduling.class."+class_name+".max_machines", "0");
+    //                 if( val == null || val.equals("0")) {
+    //                     val = "-";
+    //                 }
+    //             } else if ( policy.equals("FIXED_SHARE") ) {
+    //                 val = properties.getStringProperty("scheduling.class."+class_name+".max_processes", "0");
+    //                 if( val == null || val.equals("0")) {
+    //                     val = "-";
+    //                 }
+    //             } else {
+	// 				val = "-";
+    //             }
+
+	// 			val = properties.getStringProperty("scheduling.class."+class_name+".max_shares", "0");
+	// 			if( val == null || val.equals("0")) {
+	// 				val = "-";
+	// 			}
+	// 			sb.append(val);
+	// 			sb.append("</td>");	
+
+	// 			sb.append("<td align=\"right\">");
+	// 			val = properties.getStringProperty("scheduling.class."+class_name+".nodepool", "--global--");
+    //             sb.append(val);
+	// 			sb.append("</td>");	
+				
+	// 			// Debug
+	// 			sb.append("<td align=\"right\">");
+	// 			val = "-";
+	// 			if(schedulerClasses.isPreemptable(class_name)) {
+	// 				String v1 = properties.getStringProperty("scheduling.class."+class_name+".debug", "");
+	// 				if(!v1.equals("")) {
+	// 					val = v1;
+	// 				}
+	// 				else {
+	// 					String v2 = properties.getStringProperty("scheduling.default.name.debug", "");
+	// 					if(!v2.equals("")) {
+	// 						val = "["+v2+"]";
+	// 					}
+	// 				}
+	// 			}
+	// 			sb.append(val);
+	// 			sb.append("</td>");	
+
+	// 			sb.append("</tr>");
+	// 		}
+	// 	}
+		
+	// 	duccLogger.debug(methodName, jobid, sb);
+	// 	response.getWriter().println(sb);
+	// 	duccLogger.trace(methodName, jobid, messages.fetch("exit"));
+	// }		
+
 	private void handleServletLegacySystemDaemons(String target,Request baseRequest,HttpServletRequest request,HttpServletResponse response) 
 	throws IOException, ServletException
 	{
@@ -1661,7 +1814,7 @@ public class DuccHandlerLegacy extends D
 	}
 	
 	private void handleDuccRequest(String target,Request baseRequest,HttpServletRequest request,HttpServletResponse response) 
-	throws IOException, ServletException
+	throws Exception
 	{
 		String methodName = "handleDuccRequest";
 		duccLogger.trace(methodName, jobid, messages.fetch("enter"));