hadoop-common-commits mailing list archives

From whe...@apache.org
Subject hadoop git commit: HADOOP-12507. Move retry policy and writable interfaces to hadoop-common-client. Contributed by Haohui Mai.
Date Mon, 26 Oct 2015 19:10:48 GMT
Repository: hadoop
Updated Branches:
  refs/heads/HADOOP-12499 b6d2567ee -> bf8349d61


HADOOP-12507. Move retry policy and writable interfaces to hadoop-common-client. Contributed by Haohui Mai.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bf8349d6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bf8349d6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bf8349d6

Branch: refs/heads/HADOOP-12499
Commit: bf8349d61166fa13d12dc6cc68643ef82f906bde
Parents: b6d2567
Author: Haohui Mai <wheat9@apache.org>
Authored: Sat Sep 12 12:15:28 2015 -0700
Committer: Haohui Mai <wheat9@apache.org>
Committed: Mon Oct 26 12:10:30 2015 -0700

----------------------------------------------------------------------
 .../hadoop-common-client/pom.xml                |  7 --
 .../java/org/apache/hadoop/io/Writable.java     | 88 +++++++++++++++++
 .../apache/hadoop/io/WritableComparable.java    | 74 +++++++++++++++
 .../org/apache/hadoop/io/WritableFactory.java   | 32 +++++++
 .../org/apache/hadoop/io/retry/AtMostOnce.java  | 41 ++++++++
 .../org/apache/hadoop/io/retry/Idempotent.java  | 35 +++++++
 .../org/apache/hadoop/io/retry/RetryPolicy.java | 99 ++++++++++++++++++++
 .../java/org/apache/hadoop/io/Writable.java     | 88 -----------------
 .../apache/hadoop/io/WritableComparable.java    | 74 ---------------
 .../org/apache/hadoop/io/WritableFactory.java   | 33 -------
 .../org/apache/hadoop/io/retry/AtMostOnce.java  | 41 --------
 .../org/apache/hadoop/io/retry/Idempotent.java  | 35 -------
 .../org/apache/hadoop/io/retry/RetryPolicy.java | 99 --------------------
 13 files changed, 369 insertions(+), 377 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common-client/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common-client/pom.xml b/hadoop-common-project/hadoop-common-client/pom.xml
index cd360c5..c9e634b 100644
--- a/hadoop-common-project/hadoop-common-client/pom.xml
+++ b/hadoop-common-project/hadoop-common-client/pom.xml
@@ -47,13 +47,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
           </excludes>
         </configuration>
       </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-javadoc-plugin</artifactId>
-        <configuration>
-          <excludePackageNames>org.apache.hadoop.hdfs.protocol.proto</excludePackageNames>
-        </configuration>
-      </plugin>
     </plugins>
   </build>
 </project>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/Writable.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/Writable.java
b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/Writable.java
new file mode 100644
index 0000000..f0fe6fb
--- /dev/null
+++ b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/Writable.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io;
+
+import java.io.DataOutput;
+import java.io.DataInput;
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * A serializable object which implements a simple, efficient serialization
+ * protocol, based on {@link DataInput} and {@link DataOutput}.
+ *
+ * <p>Any <code>key</code> or <code>value</code> type in the Hadoop Map-Reduce
+ * framework implements this interface.</p>
+ * 
+ * <p>Implementations typically implement a static <code>read(DataInput)</code>
+ * method which constructs a new instance, calls {@link #readFields(DataInput)} 
+ * and returns the instance.</p>
+ * 
+ * <p>Example:</p>
+ * <p><blockquote><pre>
+ *     public class MyWritable implements Writable {
+ *       // Some data
+ *       private int counter;
+ *       private long timestamp;
+ *
+ *       // Default constructor to allow (de)serialization
+ *       MyWritable() { }
+ *
+ *       public void write(DataOutput out) throws IOException {
+ *         out.writeInt(counter);
+ *         out.writeLong(timestamp);
+ *       }
+ *
+ *       public void readFields(DataInput in) throws IOException {
+ *         counter = in.readInt();
+ *         timestamp = in.readLong();
+ *       }
+ *
+ *       public static MyWritable read(DataInput in) throws IOException {
+ *         MyWritable w = new MyWritable();
+ *         w.readFields(in);
+ *         return w;
+ *       }
+ *     }
+ * </pre></blockquote></p>
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public interface Writable {
+  /** 
+   * Serialize the fields of this object to <code>out</code>.
+   * 
+   * @param out <code>DataOutput</code> to serialize this object into.
+   * @throws IOException
+   */
+  void write(DataOutput out) throws IOException;
+
+  /** 
+   * Deserialize the fields of this object from <code>in</code>.  
+   * 
+   * <p>For efficiency, implementations should attempt to re-use storage in the 
+   * existing object where possible.</p>
+   * 
+   * @param in <code>DataInput</code> to deserialize this object from.
+   * @throws IOException
+   */
+  void readFields(DataInput in) throws IOException;
+}
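
For reference, the contract above boils down to a symmetric write/readFields round trip over
DataOutput/DataInput. A minimal sketch, assuming a hypothetical Counter implementation with a
public constructor (illustrative only, not part of this patch):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInput;
    import java.io.DataInputStream;
    import java.io.DataOutput;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.Writable;

    // Hypothetical Writable used only for illustration.
    class Counter implements Writable {
      int counter;
      long timestamp;
      Counter() { }
      Counter(int counter, long timestamp) { this.counter = counter; this.timestamp = timestamp; }
      public void write(DataOutput out) throws IOException {
        out.writeInt(counter);
        out.writeLong(timestamp);
      }
      public void readFields(DataInput in) throws IOException {
        counter = in.readInt();
        timestamp = in.readLong();
      }
    }

    public class WritableRoundTrip {
      public static void main(String[] args) throws IOException {
        // Serialize into an in-memory buffer...
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        new Counter(42, 1445886630000L).write(new DataOutputStream(bytes));

        // ...then rebuild a fresh instance from the same bytes.
        Counter copy = new Counter();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.counter);  // prints 42
      }
    }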

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/WritableComparable.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/WritableComparable.java
b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/WritableComparable.java
new file mode 100644
index 0000000..b030481
--- /dev/null
+++ b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/WritableComparable.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * A {@link Writable} which is also {@link Comparable}. 
+ *
+ * <p><code>WritableComparable</code>s can be compared to each other, typically
+ * via <code>Comparator</code>s. Any type which is to be used as a 
+ * <code>key</code> in the Hadoop Map-Reduce framework should implement this
+ * interface.</p>
+ *
+ * <p>Note that <code>hashCode()</code> is frequently used in Hadoop to partition
+ * keys. It's important that your implementation of hashCode() returns the same 
+ * result across different instances of the JVM. Note also that the default 
+ * <code>hashCode()</code> implementation in <code>Object</code> does <b>not</b>
+ * satisfy this property.</p>
+ *  
+ * <p>Example:</p>
+ * <p><blockquote><pre>
+ *     public class MyWritableComparable implements WritableComparable<MyWritableComparable> {
+ *       // Some data
+ *       private int counter;
+ *       private long timestamp;
+ *       
+ *       public void write(DataOutput out) throws IOException {
+ *         out.writeInt(counter);
+ *         out.writeLong(timestamp);
+ *       }
+ *       
+ *       public void readFields(DataInput in) throws IOException {
+ *         counter = in.readInt();
+ *         timestamp = in.readLong();
+ *       }
+ *       
+ *       public int compareTo(MyWritableComparable o) {
+ *         int thisValue = this.value;
+ *         int thatValue = o.value;
+ *         return (thisValue &lt; thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
+ *       }
+ *
+ *       public int hashCode() {
+ *         final int prime = 31;
+ *         int result = 1;
+ *         result = prime * result + counter;
+ *         result = prime * result + (int) (timestamp ^ (timestamp &gt;&gt;&gt; 32));
+ *         return result;
+ *       }
+ *     }
+ * </pre></blockquote></p>
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public interface WritableComparable<T> extends Writable, Comparable<T> {
+}
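
The compareTo contract above is what drives key ordering in Map-Reduce, while a JVM-stable
hashCode drives partitioning. A small sketch with a hypothetical IntKey class (illustrative
only, not part of this patch):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import org.apache.hadoop.io.WritableComparable;

    // Hypothetical key type used only for illustration.
    class IntKey implements WritableComparable<IntKey> {
      int value;
      IntKey() { }
      IntKey(int value) { this.value = value; }
      public void write(DataOutput out) throws IOException { out.writeInt(value); }
      public void readFields(DataInput in) throws IOException { value = in.readInt(); }
      public int compareTo(IntKey o) { return Integer.compare(value, o.value); }
      public int hashCode() { return value; }  // deterministic across JVM instances
      public boolean equals(Object o) { return o instanceof IntKey && ((IntKey) o).value == value; }
    }

    public class SortKeys {
      public static void main(String[] args) {
        List<IntKey> keys = Arrays.asList(new IntKey(3), new IntKey(1), new IntKey(2));
        Collections.sort(keys);  // natural order defined by compareTo()
        for (IntKey k : keys) {
          System.out.print(k.value + " ");  // prints 1 2 3
        }
      }
    }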

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/WritableFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/WritableFactory.java
b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/WritableFactory.java
new file mode 100644
index 0000000..6f39d14
--- /dev/null
+++ b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/WritableFactory.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/** A factory for a class of Writable.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public interface WritableFactory {
+  /** Return a new instance. */
+  Writable newInstance();
+}
+
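
Since WritableFactory declares a single abstract method, a constructor reference can serve as
the factory on Java 8+. A minimal sketch, reusing the hypothetical Counter class from the
earlier example (assumed to be in the same package):

    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableFactory;

    public class FactoryExample {
      public static void main(String[] args) {
        // The factory replaces reflective Class.newInstance() calls at the use site.
        WritableFactory factory = Counter::new;
        Writable fresh = factory.newInstance();
        System.out.println(fresh.getClass().getSimpleName());  // prints Counter
      }
    }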

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/AtMostOnce.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/AtMostOnce.java
b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/AtMostOnce.java
new file mode 100644
index 0000000..624bb43
--- /dev/null
+++ b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/AtMostOnce.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.retry;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Used to mark certain methods of an interface with at-most-once semantics.
+ * 
+ * The server must guarantee that methods are executed at most once, by keeping
+ * a retry cache. The previous response must be returned when a duplicate
+ * request is received. Because of this guarantee, a client can retry the
+ * request on failover and other network failure conditions.
+ */
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.METHOD)
+@InterfaceStability.Evolving
+public @interface AtMostOnce {
+
+}
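
The annotation is retained at runtime, so retry machinery can inspect it reflectively on a
protocol interface. A sketch with a hypothetical protocol (illustrative only, not part of
this patch):

    import java.io.IOException;
    import org.apache.hadoop.io.retry.AtMostOnce;

    // Hypothetical RPC protocol used only for illustration. A rename is not
    // idempotent, but with a server-side retry cache it can be marked @AtMostOnce
    // so the client may safely retry it after a failover.
    public interface RenameProtocol {
      @AtMostOnce
      boolean rename(String src, String dst) throws IOException;
    }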

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/Idempotent.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/Idempotent.java
b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/Idempotent.java
new file mode 100644
index 0000000..073752d
--- /dev/null
+++ b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/Idempotent.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.retry;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Used to mark certain methods of an interface as idempotent, and therefore
+ * safe to retry on failover.
+ */
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.METHOD)
+@InterfaceStability.Evolving
+public @interface Idempotent {}
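
By contrast, a read-only method can simply be marked @Idempotent, since repeating it cannot
change server state. A sketch with a hypothetical protocol method (illustrative only, not
part of this patch):

    import java.io.IOException;
    import org.apache.hadoop.io.retry.Idempotent;

    // Hypothetical RPC protocol used only for illustration.
    public interface StatusProtocol {
      @Idempotent
      long getFileLength(String path) throws IOException;
    }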

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
new file mode 100644
index 0000000..f3e2bd1
--- /dev/null
+++ b/hadoop-common-project/hadoop-common-client/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.retry;
+
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * <p>
+ * Specifies a policy for retrying method failures.
+ * Implementations of this interface should be immutable.
+ * </p>
+ */
+@InterfaceStability.Evolving
+public interface RetryPolicy {
+  
+  /**
+   * Returned by {@link RetryPolicy#shouldRetry(Exception, int, int, boolean)}.
+   */
+  @InterfaceStability.Evolving
+  public static class RetryAction {
+    
+    // A few common retry policies, with no delays.
+    public static final RetryAction FAIL =
+        new RetryAction(RetryDecision.FAIL);
+    public static final RetryAction RETRY =
+        new RetryAction(RetryDecision.RETRY);
+    public static final RetryAction FAILOVER_AND_RETRY =
+        new RetryAction(RetryDecision.FAILOVER_AND_RETRY);
+    
+    public final RetryDecision action;
+    public final long delayMillis;
+    public final String reason;
+    
+    public RetryAction(RetryDecision action) {
+      this(action, 0, null);
+    }
+    
+    public RetryAction(RetryDecision action, long delayTime) {
+      this(action, delayTime, null);
+    }
+    
+    public RetryAction(RetryDecision action, long delayTime, String reason) {
+      this.action = action;
+      this.delayMillis = delayTime;
+      this.reason = reason;
+    }
+    
+    @Override
+    public String toString() {
+      return getClass().getSimpleName() + "(action=" + action
+          + ", delayMillis=" + delayMillis + ", reason=" + reason + ")";
+    }
+    
+    public enum RetryDecision {
+      FAIL,
+      RETRY,
+      FAILOVER_AND_RETRY
+    }
+  }
+  
+  /**
+   * <p>
+   * Determines whether the framework should retry a method, given the
+   * exception it threw and the number of retries and failovers attempted for
+   * that operation so far.
+   * </p>
+   * 
+   * @param e The exception that caused the method to fail
+   * @param retries The number of times the method has been retried
+   * @param failovers The number of times the method has failed over to a
+   *          different backend implementation
+   * @param isIdempotentOrAtMostOnce <code>true</code> if the method is
+   *          {@link Idempotent} or {@link AtMostOnce} and so can reasonably be
+   *          retried on failover when we don't know if the previous attempt
+   *          reached the server or not
+   * @return A {@link RetryAction} describing whether the operation should
+   *         fail, be retried on the same server, or fail over and then be
+   *         retried, and how long to delay before retrying
+   * @throws Exception The re-thrown exception <code>e</code> indicating that
+   *           the method failed and should not be retried further
+   */
+  public RetryAction shouldRetry(Exception e, int retries, int failovers,
+      boolean isIdempotentOrAtMostOnce) throws Exception;
+}
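
To make the contract concrete, a minimal custom policy might retry annotated (idempotent or
at-most-once) calls a fixed number of times with a fixed delay, and fail everything else
immediately. This is an illustrative sketch only, not one of the policies Hadoop ships:

    import org.apache.hadoop.io.retry.RetryPolicy;

    // Illustrative, immutable RetryPolicy implementation (not part of this patch).
    public class FixedDelayRetryPolicy implements RetryPolicy {
      private final int maxRetries;
      private final long delayMillis;

      public FixedDelayRetryPolicy(int maxRetries, long delayMillis) {
        this.maxRetries = maxRetries;
        this.delayMillis = delayMillis;
      }

      @Override
      public RetryAction shouldRetry(Exception e, int retries, int failovers,
          boolean isIdempotentOrAtMostOnce) throws Exception {
        if (!isIdempotentOrAtMostOnce || retries >= maxRetries) {
          // Give up: either a retry is not safe, or the retry budget is exhausted.
          return new RetryAction(RetryAction.RetryDecision.FAIL, 0,
              "failed after " + retries + " retries");
        }
        // Safe to retry: wait delayMillis, then try the same server again.
        return new RetryAction(RetryAction.RetryDecision.RETRY, delayMillis,
            e.getClass().getSimpleName());
      }
    }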

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java
deleted file mode 100644
index f0fe6fb..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.io;
-
-import java.io.DataOutput;
-import java.io.DataInput;
-import java.io.IOException;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * A serializable object which implements a simple, efficient serialization
- * protocol, based on {@link DataInput} and {@link DataOutput}.
- *
- * <p>Any <code>key</code> or <code>value</code> type in the Hadoop Map-Reduce
- * framework implements this interface.</p>
- * 
- * <p>Implementations typically implement a static <code>read(DataInput)</code>
- * method which constructs a new instance, calls {@link #readFields(DataInput)} 
- * and returns the instance.</p>
- * 
- * <p>Example:</p>
- * <p><blockquote><pre>
- *     public class MyWritable implements Writable {
- *       // Some data
- *       private int counter;
- *       private long timestamp;
- *
- *       // Default constructor to allow (de)serialization
- *       MyWritable() { }
- *
- *       public void write(DataOutput out) throws IOException {
- *         out.writeInt(counter);
- *         out.writeLong(timestamp);
- *       }
- *
- *       public void readFields(DataInput in) throws IOException {
- *         counter = in.readInt();
- *         timestamp = in.readLong();
- *       }
- *
- *       public static MyWritable read(DataInput in) throws IOException {
- *         MyWritable w = new MyWritable();
- *         w.readFields(in);
- *         return w;
- *       }
- *     }
- * </pre></blockquote></p>
- */
-@InterfaceAudience.Public
-@InterfaceStability.Stable
-public interface Writable {
-  /** 
-   * Serialize the fields of this object to <code>out</code>.
-   * 
-   * @param out <code>DataOutput</code> to serialize this object into.
-   * @throws IOException
-   */
-  void write(DataOutput out) throws IOException;
-
-  /** 
-   * Deserialize the fields of this object from <code>in</code>.  
-   * 
-   * <p>For efficiency, implementations should attempt to re-use storage in the 
-   * existing object where possible.</p>
-   * 
-   * @param in <code>DataInput</code> to deserialize this object from.
-   * @throws IOException
-   */
-  void readFields(DataInput in) throws IOException;
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparable.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparable.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparable.java
deleted file mode 100644
index b030481..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparable.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.io;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * A {@link Writable} which is also {@link Comparable}. 
- *
- * <p><code>WritableComparable</code>s can be compared to each other, typically
- * via <code>Comparator</code>s. Any type which is to be used as a 
- * <code>key</code> in the Hadoop Map-Reduce framework should implement this
- * interface.</p>
- *
- * <p>Note that <code>hashCode()</code> is frequently used in Hadoop to partition
- * keys. It's important that your implementation of hashCode() returns the same 
- * result across different instances of the JVM. Note also that the default 
- * <code>hashCode()</code> implementation in <code>Object</code> does <b>not</b>
- * satisfy this property.</p>
- *  
- * <p>Example:</p>
- * <p><blockquote><pre>
- *     public class MyWritableComparable implements WritableComparable<MyWritableComparable> {
- *       // Some data
- *       private int counter;
- *       private long timestamp;
- *       
- *       public void write(DataOutput out) throws IOException {
- *         out.writeInt(counter);
- *         out.writeLong(timestamp);
- *       }
- *       
- *       public void readFields(DataInput in) throws IOException {
- *         counter = in.readInt();
- *         timestamp = in.readLong();
- *       }
- *       
- *       public int compareTo(MyWritableComparable o) {
- *         int thisValue = this.value;
- *         int thatValue = o.value;
- *         return (thisValue &lt; thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
- *       }
- *
- *       public int hashCode() {
- *         final int prime = 31;
- *         int result = 1;
- *         result = prime * result + counter;
- *         result = prime * result + (int) (timestamp ^ (timestamp &gt;&gt;&gt; 32));
- *         return result;
- *       }
- *     }
- * </pre></blockquote></p>
- */
-@InterfaceAudience.Public
-@InterfaceStability.Stable
-public interface WritableComparable<T> extends Writable, Comparable<T> {
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java
deleted file mode 100644
index bb8af97..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.io;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/** A factory for a class of Writable.
- * @see WritableFactories
- */
-@InterfaceAudience.Public
-@InterfaceStability.Stable
-public interface WritableFactory {
-  /** Return a new instance. */
-  Writable newInstance();
-}
-

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AtMostOnce.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AtMostOnce.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AtMostOnce.java
deleted file mode 100644
index 624bb43..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AtMostOnce.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.retry;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Inherited;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * Used to mark certain methods of an interface with at-most-once semantics.
- * 
- * The server must guarantee that methods are executed at most once, by keeping
- * a retry cache. The previous response must be returned when a duplicate
- * request is received. Because of this guarantee, a client can retry the
- * request on failover and other network failure conditions.
- */
-@Inherited
-@Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.METHOD)
-@InterfaceStability.Evolving
-public @interface AtMostOnce {
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/Idempotent.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/Idempotent.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/Idempotent.java
deleted file mode 100644
index 073752d..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/Idempotent.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.retry;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Inherited;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * Used to mark certain methods of an interface as idempotent, and therefore
- * safe to retry on failover.
- */
-@Inherited
-@Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.METHOD)
-@InterfaceStability.Evolving
-public @interface Idempotent {}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf8349d6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
deleted file mode 100644
index f3e2bd1..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.retry;
-
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * <p>
- * Specifies a policy for retrying method failures.
- * Implementations of this interface should be immutable.
- * </p>
- */
-@InterfaceStability.Evolving
-public interface RetryPolicy {
-  
-  /**
-   * Returned by {@link RetryPolicy#shouldRetry(Exception, int, int, boolean)}.
-   */
-  @InterfaceStability.Evolving
-  public static class RetryAction {
-    
-    // A few common retry policies, with no delays.
-    public static final RetryAction FAIL =
-        new RetryAction(RetryDecision.FAIL);
-    public static final RetryAction RETRY =
-        new RetryAction(RetryDecision.RETRY);
-    public static final RetryAction FAILOVER_AND_RETRY =
-        new RetryAction(RetryDecision.FAILOVER_AND_RETRY);
-    
-    public final RetryDecision action;
-    public final long delayMillis;
-    public final String reason;
-    
-    public RetryAction(RetryDecision action) {
-      this(action, 0, null);
-    }
-    
-    public RetryAction(RetryDecision action, long delayTime) {
-      this(action, delayTime, null);
-    }
-    
-    public RetryAction(RetryDecision action, long delayTime, String reason) {
-      this.action = action;
-      this.delayMillis = delayTime;
-      this.reason = reason;
-    }
-    
-    @Override
-    public String toString() {
-      return getClass().getSimpleName() + "(action=" + action
-          + ", delayMillis=" + delayMillis + ", reason=" + reason + ")";
-    }
-    
-    public enum RetryDecision {
-      FAIL,
-      RETRY,
-      FAILOVER_AND_RETRY
-    }
-  }
-  
-  /**
-   * <p>
-   * Determines whether the framework should retry a method, given the
-   * exception it threw and the number of retries and failovers attempted for
-   * that operation so far.
-   * </p>
-   * 
-   * @param e The exception that caused the method to fail
-   * @param retries The number of times the method has been retried
-   * @param failovers The number of times the method has failed over to a
-   *          different backend implementation
-   * @param isIdempotentOrAtMostOnce <code>true</code> if the method is
-   *          {@link Idempotent} or {@link AtMostOnce} and so can reasonably be
-   *          retried on failover when we don't know if the previous attempt
-   *          reached the server or not
-   * @return A {@link RetryAction} describing whether the operation should
-   *         fail, be retried on the same server, or fail over and then be
-   *         retried, and how long to delay before retrying
-   * @throws Exception The re-thrown exception <code>e</code> indicating that
-   *           the method failed and should not be retried further
-   */
-  public RetryAction shouldRetry(Exception e, int retries, int failovers,
-      boolean isIdempotentOrAtMostOnce) throws Exception;
-}

