lucene-commits mailing list archives

From sha...@apache.org
Subject [02/15] lucene-solr:master: SOLR-9735: Initial port of autoscaling work for Solr 7
Date Tue, 06 Jun 2017 04:56:51 GMT
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/AddReplicaSuggester.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/AddReplicaSuggester.java b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/AddReplicaSuggester.java
new file mode 100644
index 0000000..354851e
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/AddReplicaSuggester.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling;
+
+import java.util.List;
+
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.cloud.autoscaling.Policy.Suggester;
+
+class AddReplicaSuggester extends Suggester {
+
+  SolrRequest init() {
+    SolrRequest operation = tryEachNode(true);
+    if (operation == null) operation = tryEachNode(false);
+    return operation;
+  }
+
+  SolrRequest tryEachNode(boolean strict) {
+    String coll = (String) hints.get(Hint.COLL);
+    String shard = (String) hints.get(Hint.SHARD);
+    if (coll == null || shard == null)
+      throw new RuntimeException("add-replica requires 'collection' and 'shard'");
+    //iterate through elements and identify the least loaded
+
+    List<Clause.Violation> leastSeriousViolation = null;
+    Integer targetNodeIndex = null;
+    for (int i = getMatrix().size() - 1; i >= 0; i--) {
+      Row row = getMatrix().get(i);
+      if (!isAllowed(row.node, Hint.TARGET_NODE)) continue;
+      Row tmpRow = row.addReplica(coll, shard);
+      tmpRow.violations.clear();
+
+      List<Clause.Violation> errs = testChangedMatrix(strict, getModifiedMatrix(getMatrix(), tmpRow, i));
+      if (!containsNewErrors(errs)) {
+        if (isLessSerious(errs, leastSeriousViolation)) {
+          leastSeriousViolation = errs;
+          targetNodeIndex = i;
+        }
+      }
+    }
+
+    if (targetNodeIndex != null) { // a node was found that introduces no new rule violations
+      getMatrix().set(targetNodeIndex, getMatrix().get(targetNodeIndex).addReplica(coll, shard));
+      return CollectionAdminRequest
+          .addReplicaToShard(coll, shard)
+          .setNode(getMatrix().get(targetNodeIndex).node);
+    }
+
+    return null;
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Cell.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Cell.java b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Cell.java
new file mode 100644
index 0000000..0f2b24b
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Cell.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling;
+
+import java.io.IOException;
+import java.util.HashMap;
+
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.util.Utils;
+
+class Cell implements MapWriter {
+  final int index;
+  final String name;
+  Object val, approxVal;
+
+  Cell(int index, String name, Object val) {
+    this.index = index;
+    this.name = name;
+    this.val = val;
+  }
+
+  Cell(int index, String name, Object val, Object approxVal) {
+    this.index = index;
+    this.name = name;
+    this.val = val;
+    this.approxVal = approxVal;
+  }
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    ew.put(name, val);
+  }
+
+  @Override
+  public String toString() {
+    return Utils.toJSONString(this.toMap(new HashMap<>()));
+  }
+
+  public Cell copy() {
+    return new Cell(index, name, val, approxVal);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Clause.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Clause.java b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Clause.java
new file mode 100644
index 0000000..71ced9e
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Clause.java
@@ -0,0 +1,335 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.solr.cloud.autoscaling.Policy.ReplicaInfo;
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.common.util.Utils;
+
+import static java.util.Collections.singletonMap;
+import static org.apache.solr.cloud.autoscaling.Clause.TestStatus.PASS;
+import static org.apache.solr.cloud.autoscaling.Operand.EQUAL;
+import static org.apache.solr.cloud.autoscaling.Operand.GREATER_THAN;
+import static org.apache.solr.cloud.autoscaling.Operand.LESS_THAN;
+import static org.apache.solr.cloud.autoscaling.Operand.NOT_EQUAL;
+import static org.apache.solr.cloud.autoscaling.Operand.WILDCARD;
+import static org.apache.solr.cloud.autoscaling.Policy.ANY;
+import static org.apache.solr.common.params.CoreAdminParams.COLLECTION;
+import static org.apache.solr.common.params.CoreAdminParams.REPLICA;
+import static org.apache.solr.common.params.CoreAdminParams.SHARD;
+
+// a set of conditions in a policy
+public class Clause implements MapWriter, Comparable<Clause> {
+  Map<String, Object> original;
+  Condition collection, shard, replica, tag, globalTag;
+
+  boolean strict = true;
+
+  Clause(Map<String, Object> m) {
+    this.original = m;
+    strict = Boolean.parseBoolean(String.valueOf(m.getOrDefault("strict", "true")));
+    Optional<String> globalTagName = m.keySet().stream().filter(Policy.GLOBAL_ONLY_TAGS::contains).findFirst();
+    if (globalTagName.isPresent()) {
+      globalTag = parse(globalTagName.get(), m);
+      if (m.size() > 2) {
+        throw new RuntimeException("Only one extra tag supported for the tag " + globalTagName.get() + " in " + Utils.toJSONString(m));
+      }
+      tag = parse(m.keySet().stream()
+          .filter(s -> (!globalTagName.get().equals(s) && !IGNORE_TAGS.contains(s)))
+          .findFirst().get(), m);
+    } else {
+      collection = parse(COLLECTION, m);
+      shard = parse(SHARD, m);
+      if (m.get(REPLICA) == null) {
+        throw new RuntimeException(StrUtils.formatString("{0} is required in {1}", REPLICA, Utils.toJSONString(m)));
+      }
+      Condition replica = parse(REPLICA, m);
+      try {
+        int replicaCount = Integer.parseInt(String.valueOf(replica.val));
+        if (replicaCount < 0) {
+          throw new RuntimeException("replica value must be a non-negative integer in " + Utils.toJSONString(m));
+        }
+        this.replica = new Condition(replica.name, replicaCount, replica.op);
+      } catch (NumberFormatException e) {
+        throw new RuntimeException("Only an integer value is supported for replica " + Utils.toJSONString(m));
+      }
+      m.forEach(this::parseCondition);
+    }
+    if (tag == null)
+      throw new RuntimeException("Invalid op, must have one and only one tag other than collection, shard,replica " + Utils.toJSONString(m));
+
+  }
+
+  public boolean doesOverride(Clause that) {
+    return (collection.equals(that.collection) &&
+        tag.name.equals(that.tag.name));
+
+  }
+
+  public boolean isPerCollectiontag() {
+    return globalTag == null;
+  }
+
+  void parseCondition(String s, Object o) {
+    if (IGNORE_TAGS.contains(s)) return;
+    if (tag != null) {
+      throw new IllegalArgumentException("Only one tag other than collection, shard, replica is possible");
+    }
+    tag = parse(s, singletonMap(s, o));
+  }
+
+  @Override
+  public int compareTo(Clause that) {
+    try {
+      int v = Integer.compare(this.tag.op.priority, that.tag.op.priority);
+      if (v != 0) return v;
+      if (this.isPerCollectiontag() && that.isPerCollectiontag()) {
+        v = Integer.compare(this.replica.op.priority, that.replica.op.priority);
+        if (v == 0) {
+          v = Integer.compare((Integer) this.replica.val, (Integer) that.replica.val);
+          v = this.replica.op == LESS_THAN ? v : v * -1;
+        }
+        return v;
+      } else {
+        return 0;
+      }
+    } catch (NullPointerException e) {
+      throw e;
+    }
+  }
+
+  static class Condition {
+    final String name;
+    final Object val;
+    final Operand op;
+
+    Condition(String name, Object val, Operand op) {
+      this.name = name;
+      this.val = val;
+      this.op = op;
+    }
+
+    TestStatus match(Row row) {
+      return op.match(val, row.getVal(name));
+    }
+
+    TestStatus match(Object testVal) {
+      return op.match(this.val, testVal);
+    }
+
+    boolean isPass(Object inputVal) {
+      return op.match(val, inputVal) == PASS;
+    }
+
+    boolean isPass(Row row) {
+      return op.match(val, row.getVal(name)) == PASS;
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that instanceof Condition) {
+        Condition c = (Condition) that;
+        return Objects.equals(c.name, name) && Objects.equals(c.val, val) && c.op == op;
+      }
+      return false;
+    }
+
+    public Integer delta(Object val) {
+      return op.delta(this.val, val);
+    }
+  }
+
+  static Condition parse(String s, Map m) {
+    Object expectedVal = null;
+    Object val = m.get(s);
+    try {
+      String conditionName = s.trim();
+      String value = val == null ? null : String.valueOf(val).trim();
+      Operand operand = null;
+      if ((expectedVal = WILDCARD.parse(value)) != null) {
+        operand = WILDCARD;
+      } else if ((expectedVal = NOT_EQUAL.parse(value)) != null) {
+        operand = NOT_EQUAL;
+      } else if ((expectedVal = GREATER_THAN.parse(value)) != null) {
+        operand = GREATER_THAN;
+      } else if ((expectedVal = LESS_THAN.parse(value)) != null) {
+        operand = LESS_THAN;
+      } else {
+        operand = EQUAL;
+        expectedVal = EQUAL.parse(value);
+      }
+
+      return new Condition(conditionName, expectedVal, operand);
+
+    } catch (Exception e) {
+      throw new IllegalArgumentException("Invalid tag : " + s + ":" + val, e);
+    }
+  }
+
+  public class Violation implements MapWriter {
+    final String shard, coll, node;
+    final Object actualVal;
+    final Integer delta; // how far the actual value is from the expected value
+    final Object tagKey;
+    private final int hash;
+
+
+    private Violation(String coll, String shard, String node, Object actualVal, Integer delta, Object tagKey) {
+      this.shard = shard;
+      this.coll = coll;
+      this.node = node;
+      this.delta = delta;
+      this.actualVal = actualVal;
+      this.tagKey = tagKey;
+      hash = ("" + coll + " " + shard + " " + node + " " + String.valueOf(tagKey) + " " + Utils.toJSONString(getClause().toMap(new HashMap<>()))).hashCode();
+    }
+
+    public Clause getClause() {
+      return Clause.this;
+    }
+
+    @Override
+    public int hashCode() {
+      return hash;
+    }
+    // if the delta is lower, this violation is less serious
+    public boolean isLessSerious(Violation that) {
+      return that.delta != null && delta != null &&
+          Math.abs(delta) < Math.abs(that.delta);
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that instanceof Violation) {
+        Violation v = (Violation) that;
+        return Objects.equals(this.shard, v.shard) &&
+            Objects.equals(this.coll, v.coll) &&
+            Objects.equals(this.node, v.node) &&
+            Objects.equals(this.tagKey, v.tagKey)
+            ;
+      }
+      return false;
+    }
+
+    @Override
+    public void writeMap(EntryWriter ew) throws IOException {
+      ew.putIfNotNull("collection", coll);
+      ew.putIfNotNull("shard", shard);
+      ew.putIfNotNull("node", node);
+      ew.putIfNotNull("tagKey", String.valueOf(tagKey));
+      ew.putIfNotNull("violation", (MapWriter) ew1 -> {
+        ew1.put(getClause().isPerCollectiontag() ? "replica" : tag.name,
+            String.valueOf(actualVal));
+        ew1.putIfNotNull("delta", delta);
+      });
+      ew.put("clause", getClause());
+    }
+  }
+
+
+  public List<Violation> test(List<Row> allRows) {
+    List<Violation> violations = new ArrayList<>();
+    if (isPerCollectiontag()) {
+      Map<String, Map<String, Map<String, AtomicInteger>>> replicaCount = computeReplicaCounts(allRows);
+      for (Map.Entry<String, Map<String, Map<String, AtomicInteger>>> e : replicaCount.entrySet()) {
+        if (!collection.isPass(e.getKey())) continue;
+        for (Map.Entry<String, Map<String, AtomicInteger>> shardVsCount : e.getValue().entrySet()) {
+          if (!shard.isPass(shardVsCount.getKey())) continue;
+          for (Map.Entry<String, AtomicInteger> counts : shardVsCount.getValue().entrySet()) {
+            if (!replica.isPass(counts.getValue())) {
+              violations.add(new Violation(
+                  e.getKey(),
+                  shardVsCount.getKey(),
+                  tag.name.equals("node") ? counts.getKey() : null,
+                  counts.getValue(),
+                  replica.delta(counts.getValue()),
+                  counts.getKey()
+              ));
+            }
+          }
+        }
+      }
+    } else {
+      for (Row r : allRows) {
+        if (!tag.isPass(r)) {
+          violations.add(new Violation(null, null, r.node, r.getVal(tag.name), tag.delta(r.getVal(tag.name)), null));
+        }
+      }
+    }
+    return violations;
+
+  }
+
+
+  private Map<String, Map<String, Map<String, AtomicInteger>>> computeReplicaCounts(List<Row> allRows) {
+    Map<String, Map<String, Map<String, AtomicInteger>>> collVsShardVsTagVsCount = new HashMap<>();
+    for (Row row : allRows)
+      for (Map.Entry<String, Map<String, List<ReplicaInfo>>> colls : row.collectionVsShardVsReplicas.entrySet()) {
+        String collectionName = colls.getKey();
+        if (!collection.isPass(collectionName)) continue;
+        collVsShardVsTagVsCount.putIfAbsent(collectionName, new HashMap<>());
+        Map<String, Map<String, AtomicInteger>> collMap = collVsShardVsTagVsCount.get(collectionName);
+        for (Map.Entry<String, List<ReplicaInfo>> shards : colls.getValue().entrySet()) {
+          String shardName = shards.getKey();
+          if (ANY.equals(shard.val)) shardName = ANY;
+          if (!shard.isPass(shardName)) break;
+          collMap.putIfAbsent(shardName, new HashMap<>());
+          Map<String, AtomicInteger> tagVsCount = collMap.get(shardName);
+          Object tagVal = row.getVal(tag.name);
+          tagVsCount.putIfAbsent(tag.isPass(tagVal) ? String.valueOf(tagVal) : "", new AtomicInteger());
+          if (tag.isPass(tagVal)) {
+            tagVsCount.get(String.valueOf(tagVal)).addAndGet(shards.getValue().size());
+          }
+        }
+      }
+    return collVsShardVsTagVsCount;
+  }
+
+  public boolean isStrict() {
+    return strict;
+  }
+
+  @Override
+  public String toString() {
+    return Utils.toJSONString(original);
+  }
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    for (Map.Entry<String, Object> e : original.entrySet()) ew.put(e.getKey(), e.getValue());
+  }
+
+  enum TestStatus {
+    NOT_APPLICABLE, FAIL, PASS
+  }
+
+  private static final Set<String> IGNORE_TAGS = new HashSet<>(Arrays.asList(REPLICA, COLLECTION, SHARD, "strict"));
+}
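
For illustration, a Clause is built from a single map of conditions. A minimal sketch, assuming the lenient JSON syntax that Utils.fromJSONString accepts elsewhere in this patch (the policy values below are invented for the example, and Clause is package-private, so this only compiles from within org.apache.solr.cloud.autoscaling):

    // A per-collection clause: fewer than 2 replicas of any shard on any single node.
    Clause perColl = new Clause((Map<String, Object>) Utils.fromJSONString(
        "{replica: '<2', shard: '#EACH', node: '#ANY'}"));

    // A global clause: "cores" is a GLOBAL_ONLY_TAG, so it may only appear in cluster-policy.
    Clause global = new Clause((Map<String, Object>) Utils.fromJSONString(
        "{cores: '<10', node: '#ANY'}"));

    // rows is a hypothetical List<Row> built from the current cluster state.
    List<Clause.Violation> violations = perColl.test(rows);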

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/ClusterDataProvider.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/ClusterDataProvider.java b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/ClusterDataProvider.java
new file mode 100644
index 0000000..710db87
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/ClusterDataProvider.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+public interface ClusterDataProvider extends Closeable {
+  /** Get the value of each tag for a given node
+   *
+   * @param node node name
+   * @param tags tag names
+   * @return a map of tag vs value
+   */
+  Map<String, Object> getNodeValues(String node, Collection<String> tags);
+
+  /**
+   * Get the details of each replica on a node. It attempts to fetch as many details about
+   * each replica as are mentioned in the keys list; it is not necessary to return all of them.
+   * <p>
+   * The format is {collection: {shard: [{replica details}]}}
+   */
+  Map<String, Map<String, List<Policy.ReplicaInfo>>> getReplicaInfo(String node, Collection<String> keys);
+
+  Collection<String> getNodes();
+
+  /** Get the name of the collection-specific policy, if any
+   */
+  String getPolicyNameByCollection(String coll);
+
+  @Override
+  default void close() throws IOException {
+  }
+}
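
As a rough sketch, an implementation only has to answer these questions about the cluster; the node names and tag values below are made up for illustration:

    ClusterDataProvider cdp = new ClusterDataProvider() {
      @Override
      public Map<String, Object> getNodeValues(String node, Collection<String> tags) {
        Map<String, Object> vals = new HashMap<>();
        vals.put("cores", 2);        // invented values; return one entry per requested tag
        vals.put("freedisk", 900);
        return vals;
      }

      @Override
      public Map<String, Map<String, List<Policy.ReplicaInfo>>> getReplicaInfo(String node, Collection<String> keys) {
        // {collection: {shard: [replica details]}}; an empty map means no replicas on this node
        return Collections.emptyMap();
      }

      @Override
      public Collection<String> getNodes() {
        return Arrays.asList("node1:8983_solr", "node2:8983_solr");
      }

      @Override
      public String getPolicyNameByCollection(String coll) {
        return null;                 // no collection-specific policy configured
      }
    };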

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/MoveReplicaSuggester.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/MoveReplicaSuggester.java b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/MoveReplicaSuggester.java
new file mode 100644
index 0000000..97aef51
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/MoveReplicaSuggester.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling;
+
+import java.util.List;
+
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.cloud.autoscaling.Clause.Violation;
+import org.apache.solr.cloud.autoscaling.Policy.ReplicaInfo;
+import org.apache.solr.cloud.autoscaling.Policy.Suggester;
+import org.apache.solr.common.util.Pair;
+
+public class MoveReplicaSuggester extends Suggester {
+
+  @Override
+  SolrRequest init() {
+    SolrRequest operation = tryEachNode(true);
+    if (operation == null) operation = tryEachNode(false);
+    return operation;
+  }
+
+  SolrRequest tryEachNode(boolean strict) {
+    //iterate through elements and identify the least loaded
+    List<Clause.Violation> leastSeriousViolation = null;
+    Integer targetNodeIndex = null;
+    Integer fromNodeIndex = null;
+    ReplicaInfo fromReplicaInfo = null;
+    for (Pair<ReplicaInfo, Row> fromReplica : getValidReplicas(true, true, -1)) {
+      Row fromRow = fromReplica.second();
+      ReplicaInfo replicaInfo = fromReplica.first();
+      String coll = replicaInfo.collection;
+      String shard = replicaInfo.shard;
+      Pair<Row, ReplicaInfo> pair = fromRow.removeReplica(coll, shard);
+      Row tmpRow = pair.first();
+      if (tmpRow == null) {
+        //no such replica available
+        continue;
+      }
+      tmpRow.violations.clear();
+
+      final int i = getMatrix().indexOf(fromRow);
+      for (int j = getMatrix().size() - 1; j > i; j--) {
+        Row targetRow = getMatrix().get(j);
+        if (!isAllowed(targetRow.node, Hint.TARGET_NODE)) continue;
+        targetRow = targetRow.addReplica(coll, shard);
+        targetRow.violations.clear();
+        List<Violation> errs = testChangedMatrix(strict, getModifiedMatrix(getModifiedMatrix(getMatrix(), tmpRow, i), targetRow, j));
+        if (!containsNewErrors(errs) && isLessSerious(errs, leastSeriousViolation)) {
+          leastSeriousViolation = errs;
+          targetNodeIndex = j;
+          fromNodeIndex = i;
+          fromReplicaInfo = replicaInfo;
+        }
+      }
+    }
+    if (targetNodeIndex != null && fromNodeIndex != null) {
+      getMatrix().set(fromNodeIndex, getMatrix().get(fromNodeIndex).removeReplica(fromReplicaInfo.collection, fromReplicaInfo.shard).first());
+      getMatrix().set(targetNodeIndex, getMatrix().get(targetNodeIndex).addReplica(fromReplicaInfo.collection, fromReplicaInfo.shard));
+      return new CollectionAdminRequest.MoveReplica(
+          fromReplicaInfo.collection,
+          fromReplicaInfo.name,
+          getMatrix().get(targetNodeIndex).node);
+    }
+    return null;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Operand.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Operand.java b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Operand.java
new file mode 100644
index 0000000..5371c25
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Operand.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling;
+
+import java.util.Objects;
+
+import org.apache.solr.cloud.autoscaling.Clause.TestStatus;
+
+import static org.apache.solr.cloud.autoscaling.Clause.TestStatus.FAIL;
+import static org.apache.solr.cloud.autoscaling.Clause.TestStatus.NOT_APPLICABLE;
+import static org.apache.solr.cloud.autoscaling.Clause.TestStatus.PASS;
+import static org.apache.solr.cloud.autoscaling.Policy.ANY;
+
+
+public enum Operand {
+  WILDCARD(ANY, Integer.MAX_VALUE) {
+    @Override
+    public TestStatus match(Object ruleVal, Object testVal) {
+      return testVal == null ? NOT_APPLICABLE : PASS;
+    }
+
+    @Override
+    public Object parse(String val) {
+      if (val == null) return ANY;
+      return ANY.equals(val) || Policy.EACH.equals(val) ? val : null;
+    }
+  },
+  EQUAL("", 0) {
+    @Override
+    public int _delta(int expected, int actual) {
+      return expected - actual;
+    }
+  },
+  NOT_EQUAL("!", 2) {
+    @Override
+    public TestStatus match(Object ruleVal, Object testVal) {
+      return super.match(ruleVal, testVal) == PASS ? FAIL : PASS;
+    }
+
+    @Override
+    public int _delta(int expected, int actual) {
+      return expected - actual;
+    }
+
+  },
+  GREATER_THAN(">", 1) {
+    @Override
+    public Object parse(String val) {
+      return checkNumeric(super.parse(val));
+    }
+
+
+    @Override
+    public TestStatus match(Object ruleVal, Object testVal) {
+      if (testVal == null) return NOT_APPLICABLE;
+      return compareNum(ruleVal, testVal) == 1 ? PASS : FAIL;
+    }
+
+    @Override
+    protected int _delta(int expected, int actual) {
+      return actual > expected ? 0 : (expected + 1) - actual;
+    }
+  },
+  LESS_THAN("<", 2) {
+    @Override
+    public TestStatus match(Object ruleVal, Object testVal) {
+      if (testVal == null) return NOT_APPLICABLE;
+      return compareNum(ruleVal, testVal) == -1 ? PASS : FAIL;
+    }
+
+    @Override
+    protected int _delta(int expected, int actual) {
+      return actual < expected ? 0 : expected - actual;
+    }
+
+    @Override
+    public Object parse(String val) {
+      return checkNumeric(super.parse(val));
+    }
+  };
+  public final String operand;
+  final int priority;
+
+  Operand(String val, int priority) {
+    this.operand = val;
+    this.priority = priority;
+  }
+
+  public String toStr(Object expectedVal) {
+    return operand + expectedVal.toString();
+  }
+
+  Integer checkNumeric(Object val) {
+    if (val == null) return null;
+    try {
+      return Integer.parseInt(val.toString());
+    } catch (NumberFormatException e) {
+      throw new RuntimeException("for operand " + operand + " the value must be numeric");
+    }
+  }
+
+  public Object parse(String val) {
+    if (operand.isEmpty()) return val;
+    return val.startsWith(operand) ? val.substring(1) : null;
+  }
+
+  public TestStatus match(Object ruleVal, Object testVal) {
+    return Objects.equals(String.valueOf(ruleVal), String.valueOf(testVal)) ? PASS : FAIL;
+  }
+
+
+  public int compareNum(Object n1Val, Object n2Val) {
+    Integer n1 = (Integer) parseObj(n1Val, Integer.class);
+    Integer n2 = (Integer) parseObj(n2Val, Integer.class);
+    return n1 > n2 ? -1 : Objects.equals(n1, n2) ? 0 : 1;
+  }
+
+  Object parseObj(Object o, Class typ) {
+    if (o == null) return o;
+    if (typ == String.class) return String.valueOf(o);
+    if (typ == Integer.class) {
+      return Integer.parseInt(String.valueOf(o));
+    }
+    return o;
+  }
+
+  public Integer delta(Object expected, Object actual) {
+    try {
+      Integer expectedInt = Integer.parseInt(String.valueOf(expected));
+      Integer actualInt = Integer.parseInt(String.valueOf(actual));
+      return _delta(expectedInt, actualInt);
+    } catch (Exception e) {
+      return null;
+    }
+  }
+
+  protected int _delta(int expected, int actual) {
+    return 0;
+  }
+}
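
A few concrete data points, assuming the semantics above hold as written (TestStatus has package-private visibility, so this runs from code in the same package):

    Object three = Operand.GREATER_THAN.parse(">3");   // strips the prefix, yields Integer 3
    Operand.GREATER_THAN.match(three, 4);              // PASS: 4 > 3
    Operand.LESS_THAN.match(2, 5);                     // FAIL: 5 is not < 2
    Operand.GREATER_THAN.delta(3, 2);                  // 2: two more are needed to exceed 3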

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Policy.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Policy.java b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Policy.java
new file mode 100644
index 0000000..c8a661e
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Policy.java
@@ -0,0 +1,508 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.cloud.autoscaling.Clause.Violation;
+import org.apache.solr.common.IteratorWriter;
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.params.CollectionParams.CollectionAction;
+import org.apache.solr.common.util.Pair;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.common.util.Utils;
+
+import static java.util.Collections.emptyList;
+import static java.util.Collections.emptyMap;
+import static java.util.stream.Collectors.toList;
+
+/**
+ * The class that reads, parses, and applies the policies specified in
+ * autoscaling.json.
+ *
+ * Create one instance of this class per unique autoscaling.json;
+ * instances are immutable and thread-safe.
+ *
+ * Create a fresh Session for each use.
+ */
+public class Policy implements MapWriter {
+  public static final String EACH = "#EACH";
+  public static final String ANY = "#ANY";
+  public static final String CLUSTER_POLICY = "cluster-policy";
+  public static final String CLUSTER_PREFERENCE = "cluster-preferences";
+  public static final Set<String> GLOBAL_ONLY_TAGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("cores")));
+  final Map<String, List<Clause>> policies = new HashMap<>();
+  final List<Clause> clusterPolicy;
+  final List<Preference> clusterPreferences;
+  final List<String> params = new ArrayList<>();
+
+
+  public Policy(Map<String, Object> jsonMap) {
+
+    clusterPreferences = ((List<Map<String, Object>>) jsonMap.getOrDefault(CLUSTER_PREFERENCE, emptyList())).stream()
+        .map(Preference::new)
+        .collect(toList());
+    for (int i = 0; i < clusterPreferences.size() - 1; i++) {
+      Preference preference = clusterPreferences.get(i);
+      preference.next = clusterPreferences.get(i + 1);
+    }
+    if (clusterPreferences.isEmpty()) {
+      clusterPreferences.add(new Preference((Map<String, Object>) Utils.fromJSONString("{minimize : cores, precision:1}")));
+    }
+    clusterPolicy = ((List<Map<String, Object>>) jsonMap.getOrDefault(CLUSTER_POLICY, emptyList())).stream()
+        .map(Clause::new)
+        .collect(Collectors.toList());
+
+    ((Map<String, List<Map<String, Object>>>) jsonMap.getOrDefault("policies", emptyMap())).forEach((s, l1) ->
+        this.policies.put(s, l1.stream()
+            .map(Clause::new)
+            .sorted()
+            .collect(toList())));
+
+    this.policies.forEach((s, c) -> {
+      for (Clause clause : c) {
+        if (!clause.isPerCollectiontag())
+          throw new RuntimeException(clause.globalTag.name + " is only allowed in 'cluster-policy'");
+      }
+    });
+
+    for (Preference preference : clusterPreferences) {
+      if (params.contains(preference.name.name())) {
+        throw new RuntimeException(preference.name + " is repeated");
+      }
+      params.add(preference.name.toString());
+      preference.idx = params.size() - 1;
+    }
+  }
+
+  public List<Clause> getClusterPolicy() {
+    return clusterPolicy;
+  }
+
+  public List<Preference> getClusterPreferences() {
+    return clusterPreferences;
+  }
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    if (!policies.isEmpty()) {
+      ew.put("policies", (MapWriter) ew1 -> {
+        for (Map.Entry<String, List<Clause>> e : policies.entrySet()) {
+          ew1.put(e.getKey(), e.getValue());
+        }
+      });
+    }
+    if (!clusterPreferences.isEmpty()) {
+      ew.put("preferences", (IteratorWriter) iw -> {
+        for (Preference p : clusterPreferences) iw.add(p);
+      });
+    }
+
+  }
+
+  /**
+   * This stores the logical state of the system, given a policy and
+   * a cluster state.
+   */
+  public class Session implements MapWriter {
+    final List<String> nodes;
+    final ClusterDataProvider dataProvider;
+    final List<Row> matrix;
+    Set<String> collections = new HashSet<>();
+    List<Clause> expandedClauses;
+    List<Violation> violations = new ArrayList<>();
+    private List<String> paramsOfInterest;
+
+    private Session(List<String> nodes, ClusterDataProvider dataProvider,
+                    List<Row> matrix, List<Clause> expandedClauses,
+                    List<String> paramsOfInterest) {
+      this.nodes = nodes;
+      this.dataProvider = dataProvider;
+      this.matrix = matrix;
+      this.expandedClauses = expandedClauses;
+      this.paramsOfInterest = paramsOfInterest;
+    }
+
+    Session(ClusterDataProvider dataProvider) {
+      this.nodes = new ArrayList<>(dataProvider.getNodes());
+      this.dataProvider = dataProvider;
+      for (String node : nodes) {
+        collections.addAll(dataProvider.getReplicaInfo(node, Collections.emptyList()).keySet());
+      }
+
+      expandedClauses = clusterPolicy.stream()
+          .filter(clause -> !clause.isPerCollectiontag())
+          .collect(Collectors.toList());
+
+      for (String c : collections) {
+        addClausesForCollection(dataProvider, c);
+      }
+
+      Collections.sort(expandedClauses);
+      List<String> p = new ArrayList<>(params);
+      p.addAll(expandedClauses.stream().map(clause -> clause.tag.name).distinct().collect(Collectors.toList()));
+      paramsOfInterest = new ArrayList<>(p);
+      matrix = new ArrayList<>(nodes.size());
+      for (String node : nodes) matrix.add(new Row(node, paramsOfInterest, dataProvider));
+      applyRules();
+    }
+
+    private void addClausesForCollection(ClusterDataProvider dataProvider, String c) {
+      String p = dataProvider.getPolicyNameByCollection(c);
+      if (p != null) {
+        List<Clause> perCollPolicy = policies.get(p);
+        if (perCollPolicy == null)
+          throw new RuntimeException(StrUtils.formatString("Policy for collection {0} is {1} . It does not exist", c, p));
+      }
+      expandedClauses.addAll(mergePolicies(c, policies.getOrDefault(p, emptyList()), clusterPolicy));
+    }
+
+    Session copy() {
+      return new Session(nodes, dataProvider, getMatrixCopy(), expandedClauses, paramsOfInterest);
+    }
+
+    List<Row> getMatrixCopy() {
+      return matrix.stream()
+          .map(Row::copy)
+          .collect(Collectors.toList());
+    }
+
+    Policy getPolicy() {
+      return Policy.this;
+
+    }
+
+    /**
+     * Apply the preferences and conditions
+     */
+    private void applyRules() {
+      if (!clusterPreferences.isEmpty()) {
+        //this is to set the approximate value according to the precision
+        ArrayList<Row> tmpMatrix = new ArrayList<>(matrix);
+        for (Preference p : clusterPreferences) {
+          Collections.sort(tmpMatrix, (r1, r2) -> p.compare(r1, r2, false));
+          p.setApproxVal(tmpMatrix);
+        }
+        //approximate values are set now. Let's do recursive sorting
+        Collections.sort(matrix, (r1, r2) -> {
+          int result = clusterPreferences.get(0).compare(r1, r2, true);
+          if (result == 0) result = clusterPreferences.get(0).compare(r1, r2, false);
+          return result;
+        });
+      }
+
+      for (Clause clause : expandedClauses) {
+        List<Violation> errs = clause.test(matrix);
+        violations.addAll(errs);
+      }
+    }
+
+    public List<Violation> getViolations() {
+      return violations;
+    }
+
+    public Suggester getSuggester(CollectionAction action) {
+      Supplier<Suggester> supplier = ops.get(action);
+      if (supplier == null) throw new UnsupportedOperationException(action.toString() + " is not supported");
+      Suggester op = supplier.get();
+      op._init(this);
+      return op;
+    }
+
+    @Override
+    public void writeMap(EntryWriter ew) throws IOException {
+      for (int i = 0; i < matrix.size(); i++) {
+        Row row = matrix.get(i);
+        ew.put(row.node, row);
+      }
+    }
+
+    @Override
+    public String toString() {
+      return Utils.toJSONString(toMap(new LinkedHashMap<>()));
+    }
+
+    public List<Row> getSorted() {
+      return Collections.unmodifiableList(matrix);
+    }
+  }
+
+
+  public Session createSession(ClusterDataProvider dataProvider) {
+    return new Session(dataProvider);
+  }
+
+  enum SortParam {
+    freedisk(0, Integer.MAX_VALUE), cores(0, Integer.MAX_VALUE), heapUsage(0, Integer.MAX_VALUE), sysLoadAvg(0, 100);
+
+    public final int min,max;
+
+    SortParam(int min, int max) {
+      this.min = min;
+      this.max = max;
+    }
+
+    static SortParam get(String m) {
+      for (SortParam p : values()) if (p.name().equals(m)) return p;
+      throw new RuntimeException(StrUtils.formatString("Invalid sort {0} Sort must be on one of these {1}", m, Arrays.asList(values())));
+    }
+  }
+
+  enum Sort {
+    maximize(1), minimize(-1);
+    final int sortval;
+
+    Sort(int i) {
+      sortval = i;
+    }
+
+    static Sort get(Map<String, Object> m) {
+      if (m.containsKey(maximize.name()) && m.containsKey(minimize.name())) {
+        throw new RuntimeException("Cannot have both 'maximize' and 'minimize'");
+      }
+      if (m.containsKey(maximize.name())) return maximize;
+      if (m.containsKey(minimize.name())) return minimize;
+      throw new RuntimeException("must have either 'maximize' or 'minimize'");
+    }
+  }
+
+
+  public static class ReplicaInfo implements MapWriter {
+    final String name;
+    String core, collection, shard;
+    Map<String, Object> variables;
+
+    public ReplicaInfo(String name, String coll, String shard, Map<String, Object> vals) {
+      this.name = name;
+      this.variables = vals;
+      this.collection = coll;
+      this.shard = shard;
+    }
+
+    @Override
+    public void writeMap(EntryWriter ew) throws IOException {
+      ew.put(name, variables);
+    }
+
+    public String getCore() {
+      return core;
+    }
+
+    public String getCollection() {
+      return collection;
+    }
+
+    public String getShard() {
+      return shard;
+    }
+  }
+
+
+  /**
+   * A suggester is capable of suggesting a collection operation
+   * given a particular session. Before it suggests a new operation,
+   * it ensures that:
+   *  a) load is reduced on the most loaded node, and
+   *  b) it causes no new violations.
+   */
+  public static abstract class Suggester {
+    protected final EnumMap<Hint, Object> hints = new EnumMap<>(Hint.class);
+    Policy.Session session;
+    SolrRequest operation;
+    protected List<Violation> originalViolations = new ArrayList<>();
+    private boolean isInitialized = false;
+
+    private void _init(Session session) {
+      this.session = session.copy();
+    }
+
+    public Suggester hint(Hint hint, Object value) {
+      hints.put(hint, value);
+      return this;
+    }
+
+    abstract SolrRequest init();
+
+
+    public SolrRequest getOperation() {
+      if (!isInitialized) {
+        String coll = (String) hints.get(Hint.COLL);
+        String shard = (String) hints.get(Hint.SHARD);
+        // if this is not a known collection from the existing clusterstate,
+        // then add it
+        if (session.matrix.stream().noneMatch(row -> row.collectionVsShardVsReplicas.containsKey(coll))) {
+          session.addClausesForCollection(session.dataProvider, coll);
+          Collections.sort(session.expandedClauses);
+        }
+        if (coll != null) {
+          for (Row row : session.matrix) {
+            if (!row.collectionVsShardVsReplicas.containsKey(coll)) row.collectionVsShardVsReplicas.put(coll, new HashMap<>());
+            if (shard != null) {
+              Map<String, List<ReplicaInfo>> shardInfo = row.collectionVsShardVsReplicas.get(coll);
+              if (!shardInfo.containsKey(shard)) shardInfo.put(shard, new ArrayList<>());
+            }
+          }
+        }
+        session.applyRules();
+        originalViolations.addAll(session.getViolations());
+        this.operation = init();
+        isInitialized = true;
+      }
+      return operation;
+    }
+
+    public Session getSession() {
+      return session;
+    }
+
+    List<Row> getMatrix() {
+      return session.matrix;
+
+    }
+
+    //check if the fresh set of violations is less serious than the last set of violations
+    boolean isLessSerious(List<Violation> fresh, List<Violation> old) {
+      if (old == null || fresh.size() < old.size()) return true;
+      if (fresh.size() == old.size()) {
+        for (int i = 0; i < fresh.size(); i++) {
+          Violation freshViolation = fresh.get(i);
+          Violation oldViolation = null;
+          for (Violation v : old) {
+            if (v.equals(freshViolation)) oldViolation = v;
+          }
+          if (oldViolation != null && freshViolation.isLessSerious(oldViolation)) return true;
+        }
+      }
+      return false;
+    }
+
+    boolean containsNewErrors(List<Violation> violations) {
+      for (Violation v : violations) {
+        int idx = originalViolations.indexOf(v);
+        if (idx < 0 || originalViolations.get(idx).isLessSerious(v)) return true;
+      }
+      return false;
+    }
+
+    List<Pair<ReplicaInfo, Row>> getValidReplicas(boolean sortDesc, boolean isSource, int until) {
+      List<Pair<Policy.ReplicaInfo, Row>> allPossibleReplicas = new ArrayList<>();
+
+      if (sortDesc) {
+        if (until == -1) until = getMatrix().size();
+        for (int i = 0; i < until; i++) addReplicaToList(getMatrix().get(i), isSource, allPossibleReplicas);
+      } else {
+        if (until == -1) until = 0;
+        for (int i = getMatrix().size() - 1; i >= until; i--)
+          addReplicaToList(getMatrix().get(i), isSource, allPossibleReplicas);
+      }
+      return allPossibleReplicas;
+    }
+
+    void addReplicaToList(Row r, boolean isSource, List<Pair<Policy.ReplicaInfo, Row>> replicaList) {
+      if (!isAllowed(r.node, isSource ? Hint.SRC_NODE : Hint.TARGET_NODE)) return;
+      for (Map.Entry<String, Map<String, List<Policy.ReplicaInfo>>> e : r.collectionVsShardVsReplicas.entrySet()) {
+        if (!isAllowed(e.getKey(), Hint.COLL)) continue;
+        for (Map.Entry<String, List<Policy.ReplicaInfo>> shard : e.getValue().entrySet()) {
+          if (!isAllowed(shard.getKey(), Hint.SHARD)) continue;
+          replicaList.add(new Pair<>(shard.getValue().get(0), r));
+        }
+      }
+    }
+
+    protected List<Violation> testChangedMatrix(boolean strict, List<Row> rows) {
+      List<Violation> errors = new ArrayList<>();
+      for (Clause clause : session.expandedClauses) {
+        if (strict || clause.strict) {
+          List<Violation> errs = clause.test(rows);
+          if (!errs.isEmpty()) {
+            errors.addAll(errs);
+          }
+        }
+      }
+      return errors;
+    }
+
+    ArrayList<Row> getModifiedMatrix(List<Row> matrix, Row tmpRow, int i) {
+      ArrayList<Row> copy = new ArrayList<>(matrix);
+      copy.set(i, tmpRow);
+      return copy;
+    }
+
+    protected boolean isAllowed(Object v, Hint hint) {
+      Object hintVal = hints.get(hint);
+      return hintVal == null || Objects.equals(v, hintVal);
+    }
+
+    public enum Hint {
+      COLL, SHARD, SRC_NODE, TARGET_NODE
+    }
+
+
+  }
+
+  static List<Clause> mergePolicies(String coll,
+                                    List<Clause> collPolicy,
+                                    List<Clause> globalPolicy) {
+
+    List<Clause> merged = insertColl(coll, collPolicy);
+    List<Clause> global = insertColl(coll, globalPolicy);
+    merged.addAll(global.stream()
+        .filter(clusterPolicyClause -> merged.stream().noneMatch(perCollPolicy -> perCollPolicy.doesOverride(clusterPolicyClause)))
+        .collect(Collectors.toList()));
+    return merged;
+  }
+
+  /**
+   * Insert the collection name into the clauses where collection is not specified
+   */
+  static List<Clause> insertColl(String coll, Collection<Clause> conditions) {
+    return conditions.stream()
+        .filter(Clause::isPerCollectiontag)
+        .map(clause -> {
+          Map<String, Object> copy = new LinkedHashMap<>(clause.original);
+          if (!copy.containsKey("collection")) copy.put("collection", coll);
+          return new Clause(copy);
+        })
+        .filter(it -> (it.collection.isPass(coll)))
+        .collect(Collectors.toList());
+
+  }
+
+  private static final Map<CollectionAction, Supplier<Suggester>> ops = new HashMap<>();
+
+  static {
+    ops.put(CollectionAction.ADDREPLICA, () -> new AddReplicaSuggester());
+    ops.put(CollectionAction.MOVEREPLICA, () -> new MoveReplicaSuggester());
+  }
+
+
+}
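
Putting the pieces together, a hedged end-to-end sketch (the autoscaling.json content, the ClusterDataProvider cdp, and the collection/shard names are assumptions for the example; the flow mirrors PolicyHelper below):

    Map<String, Object> autoScalingJson = (Map<String, Object>) Utils.fromJSONString(
        "{'cluster-preferences': [{'minimize': 'cores', 'precision': 1}]," +
        " 'cluster-policy': [{'replica': '<2', 'shard': '#EACH', 'node': '#ANY'}]}");

    Policy policy = new Policy(autoScalingJson);          // immutable; safe to reuse
    Policy.Session session = policy.createSession(cdp);   // cdp: a ClusterDataProvider (see above)

    SolrRequest op = session.getSuggester(CollectionAction.ADDREPLICA)
        .hint(Policy.Suggester.Hint.COLL, "mycoll")
        .hint(Policy.Suggester.Hint.SHARD, "shard1")
        .getOperation();   // null when no node can take the replica without new violations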

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/PolicyHelper.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/PolicyHelper.java b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/PolicyHelper.java
new file mode 100644
index 0000000..ad01941
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/PolicyHelper.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling;
+
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.cloud.autoscaling.Policy.Suggester.Hint;
+
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
+
+public class PolicyHelper {
+  public static Map<String, List<String>> getReplicaLocations(String collName, Map<String, Object> autoScalingJson,
+                                                              ClusterDataProvider cdp,
+                                                              Map<String, String> optionalPolicyMapping,
+                                                              List<String> shardNames,
+                                                              int repFactor) {
+    Map<String, List<String>> positionMapping = new HashMap<>();
+    for (String shardName : shardNames) positionMapping.put(shardName, new ArrayList<>(repFactor));
+    if (optionalPolicyMapping != null) {
+      final ClusterDataProvider delegate = cdp;
+      cdp = new ClusterDataProvider() {
+        @Override
+        public Map<String, Object> getNodeValues(String node, Collection<String> tags) {
+          return delegate.getNodeValues(node, tags);
+        }
+
+        @Override
+        public Map<String, Map<String, List<Policy.ReplicaInfo>>> getReplicaInfo(String node, Collection<String> keys) {
+          return delegate.getReplicaInfo(node, keys);
+        }
+
+        @Override
+        public Collection<String> getNodes() {
+          return delegate.getNodes();
+        }
+
+        @Override
+        public String getPolicyNameByCollection(String coll) {
+          return optionalPolicyMapping.containsKey(coll) ?
+              optionalPolicyMapping.get(coll) :
+              delegate.getPolicyNameByCollection(coll);
+        }
+      };
+
+    }
+
+
+    Policy policy = new Policy(autoScalingJson);
+    Policy.Session session = policy.createSession(cdp);
+    for (String shardName : shardNames) {
+      for (int i = 0; i < repFactor; i++) {
+        Policy.Suggester suggester = session.getSuggester(ADDREPLICA)
+            .hint(Hint.COLL, collName)
+            .hint(Hint.SHARD, shardName);
+        SolrRequest op = suggester.getOperation();
+        if (op == null) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No node can satisfy the rules "+ Utils.toJSONString(Utils.getDeepCopy(session.expandedClauses, 4, true)));
+        }
+        session = suggester.getSession();
+        positionMapping.get(shardName).add(op.getParams().get(CoreAdminParams.NODE));
+      }
+    }
+
+    return positionMapping;
+  }
+
+  public List<Map> addNode(Map<String, Object> autoScalingJson, String node, ClusterDataProvider cdp) {
+    //todo
+    return null;
+
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Preference.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Preference.java b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Preference.java
new file mode 100644
index 0000000..69a9b9e
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Preference.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.common.util.Utils;
+
+class Preference implements MapWriter {
+  final Policy.SortParam name;
+  Integer precision;
+  final Policy.Sort sort;
+  Preference next;
+  public int idx;
+  private final Map original;
+
+  Preference(Map<String, Object> m) {
+    this.original = Utils.getDeepCopy(m,3);
+    sort = Policy.Sort.get(m);
+    name = Policy.SortParam.get(m.get(sort.name()).toString());
+    Object p = m.getOrDefault("precision", 0);
+    precision = p instanceof Number ? ((Number) p).intValue() : Integer.parseInt(p.toString());
+    if (precision < 0) {
+      throw new RuntimeException("precision must be a non-negative value");
+    }
+    if (precision < name.min || precision > name.max) {
+      throw new RuntimeException(StrUtils.formatString("invalid precision value {0}, must lie between {1} and {2}",
+          precision, name.min, name.max));
+    }
+
+  }
+
+  // There are two modes of compare:
+  // in recursive mode, the approximate (precision-based) value is used and ties are broken by the next preference;
+  // in non-recursive mode, precision is not taken into consideration and the sort is done on the actual value.
+  int compare(Row r1, Row r2, boolean useApprox) {
+    Object o1 = useApprox ? r1.cells[idx].approxVal : r1.cells[idx].val;
+    Object o2 = useApprox ? r2.cells[idx].approxVal : r2.cells[idx].val;
+    int result = 0;
+    if (o1 instanceof Integer && o2 instanceof Integer) result = ((Integer) o1).compareTo((Integer) o2);
+    if (o1 instanceof Long && o2 instanceof Long) result = ((Long) o1).compareTo((Long) o2);
+    if (o1 instanceof Float && o2 instanceof Float) result = ((Float) o1).compareTo((Float) o2);
+    if (o1 instanceof Double && o2 instanceof Double) result = ((Double) o1).compareTo((Double) o2);
+    return result == 0 ? next == null ? 0 : next.compare(r1, r2, useApprox) : sort.sortval * result;
+  }
+
+  // sets the approximate value on each row according to the precision
+  void setApproxVal(List<Row> tmpMatrix) {
+    Object prevVal = null;
+    for (Row row : tmpMatrix) {
+      prevVal = row.cells[idx].approxVal =
+          prevVal == null || Math.abs(((Number) prevVal).longValue() - ((Number) row.cells[idx].val).longValue()) > precision ?
+              row.cells[idx].val :
+              prevVal;
+    }
+  }
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    for (Object o : original.entrySet()) {
+      Map.Entry e = (Map.Entry) o;
+      ew.put(String.valueOf(e.getKey()), e.getValue());
+    }
+  }
+}
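
For reference, each Preference is one entry of the cluster-preferences list; when none are given, Policy falls back to {minimize : cores, precision : 1}. A hypothetical two-level list (Preference is package-private, and Policy itself wires up next and idx):

    // Sort nodes by core count first; break ties by the node with the most free disk,
    // treating freedisk values within 50 units of each other as roughly equal.
    List<Preference> prefs = Arrays.asList(
        new Preference((Map<String, Object>) Utils.fromJSONString("{minimize : cores, precision : 1}")),
        new Preference((Map<String, Object>) Utils.fromJSONString("{maximize : freedisk, precision : 50}")));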

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Row.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Row.java b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Row.java
new file mode 100644
index 0000000..a2546d0
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/Row.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.solr.common.IteratorWriter;
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.util.Pair;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.cloud.autoscaling.Policy.ReplicaInfo;
+
+import static org.apache.solr.common.params.CoreAdminParams.NODE;
+
+
+class Row implements MapWriter {
+  public final String node;
+  final Cell[] cells;
+  Map<String, Map<String, List<ReplicaInfo>>> collectionVsShardVsReplicas;
+  List<Clause> violations = new ArrayList<>();
+  boolean anyValueMissing = false;
+
+  Row(String node, List<String> params, ClusterDataProvider dataProvider) {
+    collectionVsShardVsReplicas = dataProvider.getReplicaInfo(node, params);
+    if (collectionVsShardVsReplicas == null) collectionVsShardVsReplicas = new HashMap<>();
+    this.node = node;
+    cells = new Cell[params.size()];
+    Map<String, Object> vals = dataProvider.getNodeValues(node, params);
+    for (int i = 0; i < params.size(); i++) {
+      String s = params.get(i);
+      cells[i] = new Cell(i, s, vals.get(s));
+      if (NODE.equals(s)) cells[i].val = node;
+      if (cells[i].val == null) anyValueMissing = true;
+    }
+  }
+
+  Row(String node, Cell[] cells, boolean anyValueMissing, Map<String, Map<String, List<ReplicaInfo>>> collectionVsShardVsReplicas, List<Clause> violations) {
+    this.node = node;
+    this.cells = new Cell[cells.length];
+    for (int i = 0; i < this.cells.length; i++) {
+      this.cells[i] = cells[i].copy();
+
+    }
+    this.anyValueMissing = anyValueMissing;
+    this.collectionVsShardVsReplicas = collectionVsShardVsReplicas;
+    this.violations = violations;
+  }
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    ew.put(node, (IteratorWriter) iw -> {
+      iw.add((MapWriter) e -> e.put("replicas", collectionVsShardVsReplicas));
+      for (Cell cell : cells) iw.add(cell);
+    });
+  }
+
+  Row copy() {
+    return new Row(node, cells, anyValueMissing, Utils.getDeepCopy(collectionVsShardVsReplicas, 3), new ArrayList<>(violations));
+  }
+
+  Object getVal(String name) {
+    for (Cell cell : cells) if (cell.name.equals(name)) return cell.val;
+    return null;
+  }
+
+  @Override
+  public String toString() {
+    return node;
+  }
+
+  // returns a copy of this row with one more replica of the given collection/shard added to its replica info
+  Row addReplica(String coll, String shard) {
+    Row row = copy();
+    Map<String, List<ReplicaInfo>> c = row.collectionVsShardVsReplicas.get(coll);
+    if (c == null) row.collectionVsShardVsReplicas.put(coll, c = new HashMap<>());
+    List<ReplicaInfo> replicas = c.get(shard);
+    if (replicas == null) c.put(shard, replicas = new ArrayList<>());
+    replicas.add(new ReplicaInfo("" + new Random().nextInt(1000) + 1000, coll, shard, new HashMap<>()));
+    for (Cell cell : row.cells) {
+      if (cell.name.equals("cores")) cell.val = ((Number) cell.val).intValue() + 1;
+    }
+    return row;
+
+  }
+
+  Pair<Row, ReplicaInfo> removeReplica(String coll, String shard) {
+    Row row = copy();
+    Map<String, List<ReplicaInfo>> c = row.collectionVsShardVsReplicas.get(coll);
+    if (c == null) return null;
+    List<ReplicaInfo> s = c.get(shard);
+    if (s == null || s.isEmpty()) return null;
+    return new Pair<>(row, s.remove(0));
+
+  }
+}
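
The add/remove helpers above never mutate the live row: the candidate change is applied to a copy, so a Suggester can test "what if" placements against the policy and discard them. A small stand-alone sketch of that copy-then-mutate pattern (hypothetical names, not the Solr classes):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

public class RowCopySketch {
  static class FakeRow {
    final String node;
    int cores;
    final Map<String, Map<String, List<String>>> replicas;  // collection -> shard -> replica names

    FakeRow(String node, int cores, Map<String, Map<String, List<String>>> replicas) {
      this.node = node;
      this.cores = cores;
      this.replicas = replicas;
    }

    FakeRow copy() {
      Map<String, Map<String, List<String>>> deep = new HashMap<>();
      replicas.forEach((coll, shards) -> {
        Map<String, List<String>> s = new HashMap<>();
        shards.forEach((shard, names) -> s.put(shard, new ArrayList<>(names)));
        deep.put(coll, s);
      });
      return new FakeRow(node, cores, deep);
    }

    FakeRow addReplica(String coll, String shard) {
      FakeRow row = copy();                         // leave the original untouched
      row.replicas
          .computeIfAbsent(coll, c -> new HashMap<>())
          .computeIfAbsent(shard, s -> new ArrayList<>())
          .add("placeholder-" + UUID.randomUUID()); // placeholder replica name, like the random name above
      row.cores++;                                  // keep the aggregate cell in sync
      return row;
    }
  }

  public static void main(String[] args) {
    FakeRow original = new FakeRow("node1:8983_solr", 2, new HashMap<>());
    FakeRow candidate = original.addReplica("coll1", "shard1");
    System.out.println(original.cores + " vs " + candidate.cores);  // 2 vs 3
  }
}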

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/package-info.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/package-info.java b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/package-info.java
new file mode 100644
index 0000000..a0167fa
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/cloud/autoscaling/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Common classes for the autoscaling framework: parsing policies, filtering nodes, and sorting them according to preferences
+ */
+
+package org.apache.solr.cloud.autoscaling;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/common/IteratorWriter.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/IteratorWriter.java b/solr/solrj/src/java/org/apache/solr/common/IteratorWriter.java
index 0049a5b..cbfb584 100644
--- a/solr/solrj/src/java/org/apache/solr/common/IteratorWriter.java
+++ b/solr/solrj/src/java/org/apache/solr/common/IteratorWriter.java
@@ -19,13 +19,16 @@ package org.apache.solr.common;
 
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
 
 /**
  * Interface to help do push writing to an array
  */
 public interface IteratorWriter {
   /**
-   * @param iw after this method returns , the EntryWriter Object is invalid
+   * @param iw after this method returns, the ItemWriter object is invalid.
    *          Do not hold a reference to this object
    */
   void writeIter(ItemWriter iw) throws IOException;
@@ -62,4 +65,20 @@ public interface IteratorWriter {
       return this;
     }
   }
+  default List toList(List l) {
+    try {
+      writeIter(new ItemWriter() {
+        @Override
+        public ItemWriter add(Object o) throws IOException {
+          if (o instanceof MapWriter) o = ((MapWriter) o).toMap(new LinkedHashMap<>());
+          if (o instanceof IteratorWriter) o = ((IteratorWriter) o).toList(new ArrayList<>());
+          l.add(o);
+          return this;
+        }
+      });
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    return l;
+  }
 }
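
Since writeIter(ItemWriter) is the only abstract method, the new toList default can be exercised directly with a lambda. A short usage sketch (assumes solrj at this revision on the classpath; values are illustrative):

import java.util.ArrayList;
import java.util.List;
import org.apache.solr.common.IteratorWriter;

public class ToListSketch {
  public static void main(String[] args) {
    IteratorWriter iw = w -> {
      w.add("node1:8983_solr");                // items are pushed to the ItemWriter...
      w.add(42);
    };
    List out = iw.toList(new ArrayList<>());   // ...and materialized into the supplied list
    System.out.println(out);                   // [node1:8983_solr, 42]
  }
}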

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/common/MapWriter.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java
index 8fb9d03..fca6e2b 100644
--- a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java
+++ b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java
@@ -19,6 +19,8 @@ package org.apache.solr.common;
 
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
 import java.util.Map;
 
 /**
@@ -34,6 +36,8 @@ public interface MapWriter extends MapSerializable {
       writeMap(new EntryWriter() {
         @Override
         public EntryWriter put(String k, Object v) throws IOException {
+          if (v instanceof MapWriter) v = ((MapWriter) v).toMap(new LinkedHashMap<>());
+          if (v instanceof IteratorWriter) v = ((IteratorWriter) v).toList(new ArrayList<>());
           map.put(k, v);
           return this;
         }
@@ -60,6 +64,12 @@ public interface MapWriter extends MapSerializable {
      */
     EntryWriter put(String k, Object v) throws IOException;
 
+    default EntryWriter putIfNotNull(String k, Object v) throws IOException {
+      if (v != null) put(k, v);
+      return this;
+    }
+
+
     default EntryWriter put(String k, int v) throws IOException {
       put(k, (Integer) v);
       return this;
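
With these changes, toMap also flattens nested MapWriter/IteratorWriter values, and putIfNotNull lets writers skip absent fields. A hedged usage sketch (assumes solrj at this revision; the keys and values are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.solr.common.MapWriter;

public class MapWriterSketch {
  public static void main(String[] args) {
    MapWriter inner = ew -> ew.put("freedisk", 975);
    MapWriter outer = ew -> ew
        .put("node", "node1:8983_solr")
        .put("values", inner)                  // nested writer, materialized by toMap
        .putIfNotNull("role", null);           // skipped entirely because the value is null
    Map m = outer.toMap(new LinkedHashMap<>());
    System.out.println(m);                     // {node=node1:8983_solr, values={freedisk=975}}
  }
}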

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java b/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java
index 5c3f895..6f663c5 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java
@@ -26,6 +26,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.function.BiConsumer;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
@@ -170,6 +171,10 @@ public class DocCollection extends ZkNodeProps implements Iterable<Slice> {
     return slices.get(sliceName);
   }
 
+  public void forEachReplica(BiConsumer<String, Replica> consumer) {
+    slices.forEach((shard, slice) -> slice.getReplicasMap().forEach((s, replica) -> consumer.accept(shard, replica)));
+  }
+
   /**
    * Gets the list of all slices for this collection.
    */
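
The new forEachReplica helper visits every replica of every slice together with its shard name. A hedged usage sketch (the DocCollection instance is assumed to come from live cluster state, e.g. via ZkStateReader, and is not constructed here):

import org.apache.solr.common.cloud.DocCollection;

public class ForEachReplicaSketch {
  static void printReplicas(DocCollection coll) {
    coll.forEachReplica((shard, replica) ->
        System.out.println(shard + " -> " + replica.getName() + " @ " + replica.getNodeName()));
  }
}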

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java b/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java
index 66033bc..507f719 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java
@@ -32,6 +32,7 @@ import java.lang.invoke.MethodHandles;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Path;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.RejectedExecutionException;
 import java.util.regex.Pattern;
@@ -44,6 +45,7 @@ import org.apache.solr.common.cloud.ZkClientConnectionStrategy.ZkUpdate;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.ObjectReleaseTracker;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
+import org.apache.solr.common.util.Utils;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NoNodeException;
@@ -362,6 +364,19 @@ public class SolrZkClient implements Closeable {
     }
   }
 
+  public Map<String, Object> getJson(String path, boolean retryOnConnLoss) throws KeeperException, InterruptedException {
+    byte[] bytes = null;
+    try {
+      bytes = getData(path, null, null, retryOnConnLoss);
+    } catch (KeeperException.NoNodeException e) {
+      return null;
+    }
+    if (bytes != null && bytes.length > 0) {
+      return (Map<String, Object>) Utils.fromJSON(bytes);
+    }
+    return null;
+  }
+
   /**
    * Returns node's state
    */
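
getJson is a small convenience wrapper: fetch the znode's bytes and parse them as a JSON map, returning null when the node is missing or empty. A hedged usage sketch (assumes an already-connected SolrZkClient; SOLR_AUTOSCALING_CONF_PATH is added to ZkStateReader later in this commit):

import java.util.Map;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.zookeeper.KeeperException;

public class GetJsonSketch {
  static Map<String, Object> readAutoscalingConf(SolrZkClient zkClient)
      throws KeeperException, InterruptedException {
    // returns null if /autoscaling.json does not exist yet
    return zkClient.getJson(ZkStateReader.SOLR_AUTOSCALING_CONF_PATH, true);
  }
}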

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
index 158b53a..1b03534 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
@@ -93,6 +93,7 @@ public class ZkStateReader implements Closeable {
   public static final String CLUSTER_PROPS = "/clusterprops.json";
   public static final String REJOIN_AT_HEAD_PROP = "rejoinAtHead";
   public static final String SOLR_SECURITY_CONF_PATH = "/security.json";
+  public static final String SOLR_AUTOSCALING_CONF_PATH = "/autoscaling.json";
 
   public static final String REPLICATION_FACTOR = "replicationFactor";
   public static final String MAX_SHARDS_PER_NODE = "maxShardsPerNode";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/common/cloud/rule/ImplicitSnitch.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/rule/ImplicitSnitch.java b/solr/solrj/src/java/org/apache/solr/common/cloud/rule/ImplicitSnitch.java
index 1ae618d..a2af163 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/rule/ImplicitSnitch.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/rule/ImplicitSnitch.java
@@ -46,7 +46,10 @@ public class ImplicitSnitch extends Snitch {
   public static final String CORES = "cores";
   public static final String DISK = "freedisk";
   public static final String ROLE = "role";
+  public static final String NODEROLE = "nodeRole";
   public static final String SYSPROP = "sysprop.";
+  public static final String SYSLOADAVG = "sysLoadAvg";
+  public static final String HEAPUSAGE = "heapUsage";
   public static final List<String> IP_SNITCHES = Collections.unmodifiableList(Arrays.asList("ip_1", "ip_2", "ip_3", "ip_4"));
   public static final Set<String> tags = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(NODE, PORT, HOST, CORES, DISK, ROLE, "ip_1", "ip_2", "ip_3", "ip_4")));
 
@@ -61,9 +64,15 @@ public class ImplicitSnitch extends Snitch {
       Matcher hostAndPortMatcher = hostAndPortPattern.matcher(solrNode);
       if (hostAndPortMatcher.find()) ctx.getTags().put(PORT, hostAndPortMatcher.group(2));
     }
-    if (requestedTags.contains(ROLE)) fillRole(solrNode, ctx);
+    if (requestedTags.contains(ROLE)) fillRole(solrNode, ctx, ROLE);
+    if (requestedTags.contains(NODEROLE)) fillRole(solrNode, ctx, NODEROLE); // for the new policy framework
+
     addIpTags(solrNode, requestedTags, ctx);
 
+    getRemoteInfo(solrNode, requestedTags, ctx);
+  }
+
+  protected void getRemoteInfo(String solrNode, Set<String> requestedTags, SnitchContext ctx) {
     ModifiableSolrParams params = new ModifiableSolrParams();
     if (requestedTags.contains(CORES)) params.add(CORES, "1");
     if (requestedTags.contains(DISK)) params.add(DISK, "1");
@@ -73,7 +82,7 @@ public class ImplicitSnitch extends Snitch {
     if (params.size() > 0) ctx.invokeRemote(solrNode, params, "org.apache.solr.cloud.rule.ImplicitSnitch", null);
   }
 
-  private void fillRole(String solrNode, SnitchContext ctx) {
+  private void fillRole(String solrNode, SnitchContext ctx, String key) {
     Map roles = (Map) ctx.retrieve(ZkStateReader.ROLES); // we don't want to hit the ZK for each node
     // so cache and reuse
     if(roles == null) roles = ctx.getZkJson(ZkStateReader.ROLES);
@@ -83,7 +92,7 @@ public class ImplicitSnitch extends Snitch {
         Map.Entry e = (Map.Entry) o;
         if (e.getValue() instanceof List) {
           if(((List) e.getValue()).contains(solrNode)) {
-            ctx.getTags().put(ROLE, e.getKey());
+            ctx.getTags().put(key, e.getKey());
             break;
           }
         }
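
fillRole resolves a node's role by scanning the cached /roles.json map (role name -> list of nodes) and reporting the first match under the requested tag key (ROLE for the old rules, NODEROLE for the new policy framework). A plain-Java sketch of that lookup (illustrative data, not the Snitch classes):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

public class RoleLookupSketch {
  static String roleOf(String node, Map<String, List<String>> roles) {
    for (Map.Entry<String, List<String>> e : roles.entrySet()) {
      if (e.getValue().contains(node)) return e.getKey();   // first role that lists the node wins
    }
    return null;
  }

  public static void main(String[] args) {
    Map<String, List<String>> roles =
        Collections.singletonMap("overseer", Arrays.asList("127.0.0.1:8983_solr"));
    System.out.println(roleOf("127.0.0.1:8983_solr", roles)); // overseer
  }
}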

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/common/cloud/rule/SnitchContext.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/rule/SnitchContext.java b/solr/solrj/src/java/org/apache/solr/common/cloud/rule/SnitchContext.java
index 3bb081b..69a353e 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/rule/SnitchContext.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/rule/SnitchContext.java
@@ -44,10 +44,6 @@ public abstract class SnitchContext implements RemoteCallback {
     this.session = session;
   }
 
-  public SnitchInfo getSnitchInfo() {
-    return snitchInfo;
-  }
-
   public Map<String, Object> getTags() {
     return tags;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
index 589ef7e..dad586e 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
@@ -181,6 +181,8 @@ public interface CommonParams {
   String AUTHC_PATH = "/admin/authentication";
   String ZK_PATH = "/admin/zookeeper";
   String METRICS_PATH = "/admin/metrics";
+  String AUTOSCALING_PATH = "/admin/autoscaling";
+  String AUTOSCALING_DIAGNOSTICS_PATH = "/admin/autoscaling/diagnostics";
 
   Set<String> ADMIN_PATHS = new HashSet<>(Arrays.asList(
       CORES_HANDLER_PATH,
@@ -188,7 +190,9 @@ public interface CommonParams {
       CONFIGSETS_HANDLER_PATH,
       AUTHC_PATH,
       AUTHZ_PATH,
-      METRICS_PATH));
+      METRICS_PATH,
+      AUTOSCALING_PATH,
+      AUTOSCALING_DIAGNOSTICS_PATH));
 
   /** valid values for: <code>echoParams</code> */
   enum EchoParamStyle {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java b/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java
index 5b043e7..f995d09 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java
@@ -24,6 +24,7 @@ import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.solr.common.SolrException;
 import org.noggit.JSONParser;
@@ -59,17 +60,18 @@ public class CommandOperation {
   }
 
   public boolean getBoolean(String key, boolean def) {
-    String v = getStr(key,null);
-    return v == null? def:Boolean.parseBoolean(v);
+    String v = getStr(key, null);
+    return v == null ? def : Boolean.parseBoolean(v);
   }
-  public void setCommandData(Object o){
+
+  public void setCommandData(Object o) {
     commandData = o;
   }
 
-  public Map<String,Object> getDataMap() {
+  public Map<String, Object> getDataMap() {
     if (commandData instanceof Map) {
       //noinspection unchecked
-      return (Map<String,Object>)commandData;
+      return (Map<String, Object>) commandData;
     }
     addError(StrUtils.formatString("The command ''{0}'' should have the values as a json object {key:val} format", name));
     return Collections.emptyMap();
@@ -89,7 +91,7 @@ public class CommandOperation {
   }
 
   private Object getMapVal(String key) {
-    if("".equals(key)){
+    if ("".equals(key)) {
       if (commandData instanceof Map) {
         addError("value of the command is an object should be primitive");
       }
@@ -183,10 +185,10 @@ public class CommandOperation {
    * Get all the values from the metadata for the command
    * without the specified keys
    */
-  public Map<String,Object> getValuesExcluding(String... keys) {
+  public Map<String, Object> getValuesExcluding(String... keys) {
     getMapVal(null);
     if (hasError()) return emptyMap();//just to verify the type is Map
-    @SuppressWarnings("unchecked") 
+    @SuppressWarnings("unchecked")
     LinkedHashMap<String, Object> cp = new LinkedHashMap<>((Map<String, Object>) commandData);
     if (keys == null) return cp;
     for (String key : keys) {
@@ -213,11 +215,19 @@ public class CommandOperation {
     return errors;
   }
 
+  public static List<CommandOperation> parse(Reader rdr) throws IOException {
+    return parse(rdr, Collections.emptySet());
+
+  }
 
   /**
    * Parse the command operations into command objects
+   *
+   * @param rdr               The payload
+   * @param singletonCommands commands that cannot be repeated
+   * @return parsed list of commands
    */
-  public static List<CommandOperation> parse(Reader rdr) throws IOException {
+  public static List<CommandOperation> parse(Reader rdr, Set<String> singletonCommands) throws IOException {
     JSONParser parser = new JSONParser(rdr);
 
     ObjectBuilder ob = new ObjectBuilder(parser);
@@ -232,7 +242,7 @@ public class CommandOperation {
       Object key = ob.getKey();
       ev = parser.nextEvent();
       Object val = ob.getVal();
-      if (val instanceof List) {
+      if (val instanceof List && !singletonCommands.contains(key)) {
         List list = (List) val;
         for (Object o : list) {
           if (!(o instanceof Map)) {
@@ -270,7 +280,21 @@ public class CommandOperation {
     return new String(toJSON(singletonMap(name, commandData)), StandardCharsets.UTF_8);
   }
 
-  public static List<CommandOperation> readCommands(Iterable<ContentStream> streams, NamedList resp)
+  public static List<CommandOperation> readCommands(Iterable<ContentStream> streams, NamedList resp) throws IOException {
+    return readCommands(streams, resp, Collections.emptySet());
+  }
+
+
+  /**
+   * Read commands from request streams
+   *
+   * @param streams           the streams
+   * @param resp              solr query response
+   * @param singletonCommands commands that cannot be repeated
+   * @return parsed list of commands
+   * @throws IOException if there is an error while parsing the stream
+   */
+  public static List<CommandOperation> readCommands(Iterable<ContentStream> streams, NamedList resp, Set<String> singletonCommands)
       throws IOException {
     if (streams == null) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "missing content stream");
@@ -278,7 +302,7 @@ public class CommandOperation {
     ArrayList<CommandOperation> ops = new ArrayList<>();
 
     for (ContentStream stream : streams)
-      ops.addAll(parse(stream.getReader()));
+      ops.addAll(parse(stream.getReader(), singletonCommands));
     List<Map> errList = CommandOperation.captureErrors(ops);
     if (!errList.isEmpty()) {
       resp.add(CommandOperation.ERR_MSGS, errList);
@@ -312,7 +336,7 @@ public class CommandOperation {
 
   public Integer getInt(String name) {
     Object o = getVal(name);
-    if(o == null) return null;
+    if (o == null) return null;
     return getInt(name, null);
   }
 }
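
The effect of the new singletonCommands parameter: by default a JSON value that is a list is split into one CommandOperation per element, but a command named in the singleton set keeps the whole list as a single command's data. A hedged sketch (assumes solrj at this revision; "set-policy" is just an illustrative command name):

import java.io.IOException;
import java.io.StringReader;
import java.util.Collections;
import java.util.List;
import org.apache.solr.common.util.CommandOperation;

public class ParseSketch {
  public static void main(String[] args) throws IOException {
    String payload = "{\"set-policy\": [{\"replica\":1}, {\"replica\":2}]}";

    List<CommandOperation> split = CommandOperation.parse(new StringReader(payload));
    List<CommandOperation> whole = CommandOperation.parse(
        new StringReader(payload), Collections.singleton("set-policy"));

    System.out.println(split.size());   // 2 -- one command per list element
    System.out.println(whole.size());   // 1 -- the list stays as one command's data
  }
}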

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
index def3571..d9843e1 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
@@ -33,6 +33,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.solr.common.EnumFieldValue;
 import org.apache.solr.common.IteratorWriter;
@@ -390,7 +392,18 @@ public class JavaBinCodec implements PushWriter {
       writeMap(((MapSerializable) val).toMap(new NamedList().asShallowMap()));
       return true;
     }
-
+    if (val instanceof AtomicInteger) {
+      writeInt(((AtomicInteger) val).get());
+      return true;
+    }
+    if (val instanceof AtomicLong) {
+      writeLong(((AtomicLong) val).get());
+      return true;
+    }
+    if (val instanceof AtomicBoolean) {
+      writeBoolean(((AtomicBoolean) val).get());
+      return true;
+    }
     return false;
   }
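
With the new branches, AtomicInteger/AtomicLong/AtomicBoolean values are serialized as their current primitive value, so they deserialize as plain Integer/Long/Boolean. A hedged round-trip sketch (assumes solrj at this revision):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.solr.common.util.JavaBinCodec;
import org.apache.solr.common.util.Utils;

public class AtomicJavabinSketch {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    new JavaBinCodec().marshal(Utils.makeMap("requests", new AtomicInteger(7)), baos);
    Object readBack = new JavaBinCodec().unmarshal(new ByteArrayInputStream(baos.toByteArray()));
    System.out.println(readBack);   // {requests=7}, the counter comes back as a plain Integer
  }
}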
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e5d8ed39/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
index 4cb6b8e..cf83dee 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
@@ -31,11 +31,15 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.http.HttpEntity;
 import org.apache.http.util.EntityUtils;
+import org.apache.solr.common.IteratorWriter;
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
 import org.noggit.CharArr;
 import org.noggit.JSONParser;
@@ -51,31 +55,58 @@ public class Utils {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static Map getDeepCopy(Map map, int maxDepth) {
-    return getDeepCopy(map, maxDepth, true);
+    return getDeepCopy(map, maxDepth, true, false);
   }
 
   public static Map getDeepCopy(Map map, int maxDepth, boolean mutable) {
+    return getDeepCopy(map, maxDepth, mutable, false);
+  }
+
+  public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) {
     if(map == null) return null;
     if (maxDepth < 1) return map;
-    Map copy = new LinkedHashMap();
+    Map copy;
+    if (sorted) {
+      copy = new TreeMap();
+    } else {
+      copy = new LinkedHashMap();
+    }
     for (Object o : map.entrySet()) {
       Map.Entry e = (Map.Entry) o;
-      Object v = e.getValue();
-      if (v instanceof Map) v = getDeepCopy((Map) v, maxDepth - 1, mutable);
-      else if (v instanceof Collection) v = getDeepCopy((Collection) v, maxDepth - 1, mutable);
-      copy.put(e.getKey(), v);
+      copy.put(e.getKey(), makeDeepCopy(e.getValue(),maxDepth, mutable, sorted));
     }
     return mutable ? copy : Collections.unmodifiableMap(copy);
   }
 
+  private static Object makeDeepCopy(Object v, int maxDepth, boolean mutable, boolean sorted) {
+    if (v instanceof MapWriter && maxDepth > 1) {
+      v = ((MapWriter) v).toMap(new LinkedHashMap<>());
+    } else if (v instanceof IteratorWriter && maxDepth > 1) {
+      v = ((IteratorWriter) v).toList(new ArrayList<>());
+      if (sorted) {
+        Collections.sort((List)v);
+      }
+    }
+
+    if (v instanceof Map) {
+      v = getDeepCopy((Map) v, maxDepth - 1, mutable, sorted);
+    } else if (v instanceof Collection) {
+      v = getDeepCopy((Collection) v, maxDepth - 1, mutable, sorted);
+    }
+    return v;
+  }
+
   public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) {
+    return getDeepCopy(c, maxDepth, mutable, false);
+  }
+
+  public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable, boolean sorted) {
     if (c == null || maxDepth < 1) return c;
-    Collection result = c instanceof Set ? new HashSet() : new ArrayList();
-    for (Object o : c) {
-      if (o instanceof Map) {
-        o = getDeepCopy((Map) o, maxDepth - 1, mutable);
-      }
-      result.add(o);
+    Collection result = c instanceof Set ?
+        ( sorted? new TreeSet() : new HashSet()) : new ArrayList();
+    for (Object o : c) result.add(makeDeepCopy(o, maxDepth, mutable, sorted));
+    if (sorted && (result instanceof List)) {
+      Collections.sort((List)result);
     }
     return mutable ? result : result instanceof Set ? unmodifiableSet((Set) result) : unmodifiableList((List) result);
   }
@@ -83,6 +114,13 @@ public class Utils {
   public static byte[] toJSON(Object o) {
     if(o == null) return new byte[0];
     CharArr out = new CharArr();
+    if (!(o instanceof List) && !(o instanceof Map)) {
+      if (o instanceof MapWriter)  {
+        o = ((MapWriter)o).toMap(new LinkedHashMap<>());
+      } else if(o instanceof IteratorWriter){
+        o = ((IteratorWriter)o).toList(new ArrayList<>());
+      }
+    }
     new JSONWriter(out, 2).write(o); // indentation by default
     return toUTF8(out);
   }
@@ -112,12 +150,18 @@ public class Utils {
   }
 
   public static Map<String, Object> makeMap(Object... keyVals) {
+    return makeMap(false, keyVals);
+  }
+
+  public static Map<String, Object> makeMap(boolean skipNulls, Object... keyVals) {
     if ((keyVals.length & 0x01) != 0) {
       throw new IllegalArgumentException("arguments should be key,value");
     }
     Map<String, Object> propMap = new LinkedHashMap<>(keyVals.length >> 1);
     for (int i = 0; i < keyVals.length; i += 2) {
-      propMap.put(keyVals[i].toString(), keyVals[i + 1]);
+      Object keyVal = keyVals[i + 1];
+      if (skipNulls && keyVal == null) continue;
+      propMap.put(keyVals[i].toString(), keyVal);
     }
     return propMap;
   }
@@ -152,6 +196,7 @@ public class Utils {
   }
 
   public static Object getObjectByPath(Map root, boolean onlyPrimitive, List<String> hierarchy) {
+    if(root == null) return null;
     Map obj = root;
     for (int i = 0; i < hierarchy.size(); i++) {
       int idx = -1;
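
Taken together, the Utils additions give callers key-sorted deep copies (useful for stable JSON output in tests), JSON serialization of MapWriter/IteratorWriter objects, null-skipping map construction, and a null-safe getObjectByPath. A hedged sketch of the two new entry points (assumes solrj at this revision; keys and values are illustrative):

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Map;
import org.apache.solr.common.util.Utils;

public class UtilsSketch {
  public static void main(String[] args) {
    Map<String, Object> m = Utils.makeMap(true,
        "node", "node1:8983_solr",
        "role", null,                                   // dropped because skipNulls=true
        "shards", Arrays.asList("shard2", "shard1"));

    Map sorted = Utils.getDeepCopy(m, 4, true, true);   // TreeMap copy; nested lists are sorted too
    System.out.println(new String(Utils.toJSON(sorted), StandardCharsets.UTF_8));
  }
}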

