hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From brando...@apache.org
Subject svn commit: r1493924 [1/3] - in /hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src: main/java/org/apache/hadoop/mount/ main/java/org/apache/hadoop/nfs/ main/java/org/apache/hadoop/nfs/nfs3/ main/java/org/apache/hadoop/nfs/nfs3/request/ main/java...
Date Mon, 17 Jun 2013 20:32:14 GMT
Author: brandonli
Date: Mon Jun 17 20:32:13 2013
New Revision: 1493924

URL: http://svn.apache.org/r1493924
Log:
HADOOP-9515. Add general interface for NFS and Mount. Contributed by Brandon Li

Added:
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountEntry.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountInterface.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsFileType.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsTime.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Constant.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3FileAttributes.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Interface.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Status.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/ACCESS3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/COMMIT3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/FSINFO3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/FSSTAT3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/GETATTR3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/PATHCONF3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READ3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READDIR3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READDIRPLUS3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READLINK3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RequestWithHandle.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SETATTR3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SetAttr3.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/WRITE3Request.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/COMMIT3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/CREATE3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSINFO3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSSTAT3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/GETATTR3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LOOKUP3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKDIR3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/NFS3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/PATHCONF3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READ3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READLINK3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/REMOVE3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RENAME3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RMDIR3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SETATTR3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SYMLINK3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/VoidResponse.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WRITE3Response.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccAttr.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccData.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsTime.java
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/
    hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestFileHandle.java

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountEntry.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountEntry.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountEntry.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountEntry.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mount;
+
+/**
+ * Represents a mount entry.
+ */
+public class MountEntry {
+  /** Host correspoinding to the mount entry */
+  private final String host;
+  /** Path correspoinding to the mount entry */
+  private final String path;
+
+  public MountEntry(String host, String path) {
+    this.host = host;
+    this.path = path;
+  }
+
+  public String host() {
+    return this.host;
+  }
+
+  public String path() {
+    return this.path;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o)
+      return true;
+
+    if (!(o instanceof MountEntry)) {
+      return false;
+    }
+
+    MountEntry m = (MountEntry) o;
+    return host().equals(m.host()) && path().equals(m.path());
+  }
+
+  @Override
+  public int hashCode() {
+    return host.hashCode() * 31 + path.hashCode();
+  }
+}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountInterface.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountInterface.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountInterface.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountInterface.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mount;
+
+import java.net.InetAddress;
+
+import org.apache.hadoop.oncrpc.XDR;
+
/**
 * Interface to be implemented by handlers of Mount (mountd) protocol
 * requests. See RFC 1094 for more details.
 */
public interface MountInterface {
  // Mount protocol procedure numbers. Note: interface fields are
  // implicitly public static final.
  public static int MNTPROC_NULL = 0;
  public static int MNTPROC_MNT = 1;
  public static int MNTPROC_DUMP = 2;
  public static int MNTPROC_UMNT = 3;
  public static int MNTPROC_UMNTALL = 4;
  public static int MNTPROC_EXPORT = 5;
  public static int MNTPROC_EXPORTALL = 6;
  public static int MNTPROC_PATHCONF = 7;

  /** MNTPROC_NULL - Do nothing. */
  public XDR nullOp(XDR out, int xid, InetAddress client);

  /** MNTPROC_MNT - Add a mount entry for the requesting client. */
  public XDR mnt(XDR xdr, XDR out, int xid, InetAddress client);

  /** MNTPROC_DUMP - Return the current mount entries. */
  public XDR dump(XDR out, int xid, InetAddress client);

  /** MNTPROC_UMNT - Remove a mount entry. */
  public XDR umnt(XDR xdr, XDR out, int xid, InetAddress client);

  /** MNTPROC_UMNTALL - Remove all mount entries. */
  public XDR umntall(XDR out, int xid, InetAddress client);
  
  /** MNTPROC_EXPORT and MNTPROC_EXPORTALL - Return export list (not part of the interface yet). */
  //public XDR exportall(XDR out, int xid, InetAddress client);
  
  /** MNTPROC_PATHCONF - POSIX pathconf information (not part of the interface yet). */
  //public XDR pathconf(XDR out, int xid, InetAddress client);
}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mount;
+
+import java.util.List;
+
+import org.apache.hadoop.oncrpc.RpcAcceptedReply;
+import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.RpcAuthInfo.AuthFlavor;
+
+/**
+ * Helper class for sending MountResponse
+ */
+public class MountResponse {
+  public final static int MNT_OK = 0;
+  
+  /** Hidden constructor */
+  private MountResponse() {
+  }
+  
+  /** Response for RPC call {@link MountInterface#MNTPROC_MNT} */
+  public static XDR writeMNTResponse(int status, XDR xdr, int xid,
+      byte[] handle) {
+    RpcAcceptedReply.voidReply(xdr, xid);
+    xdr.writeInt(status);
+    if (status == MNT_OK) {
+      xdr.writeInt(handle.length);
+      xdr.writeFixedOpaque(handle);
+      // Only MountV3 returns a list of supported authFlavors
+      xdr.writeInt(1);
+      xdr.writeInt(AuthFlavor.AUTH_SYS.getValue());
+    }
+    return xdr;
+  }
+
+  /** Response for RPC call {@link MountInterface#MNTPROC_DUMP} */
+  public static XDR writeMountList(XDR xdr, int xid, List<MountEntry> mounts) {
+    RpcAcceptedReply.voidReply(xdr, xid);
+    for (MountEntry mountEntry : mounts) {
+      xdr.writeBoolean(true); // Value follows yes
+      xdr.writeString(mountEntry.host());
+      xdr.writeString(mountEntry.path());
+    }
+    xdr.writeBoolean(false); // Value follows no
+    return xdr;
+  }
+
+  /** Response for RPC call {@link MountInterface#MNTPROC_EXPORT} */
+  public static XDR writeExportList(XDR xdr, int xid, List<String> exports) {
+    RpcAcceptedReply.voidReply(xdr, xid);
+    for (String export : exports) {
+      xdr.writeBoolean(true); // Value follows - yes
+      xdr.writeString(export);
+      xdr.writeInt(0);
+    }
+    xdr.writeBoolean(false); // Value follows - no
+    return xdr;
+  }
+}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mount;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.oncrpc.RpcProgram;
+import org.apache.hadoop.oncrpc.SimpleTcpServer;
+import org.apache.hadoop.oncrpc.SimpleUdpServer;
+import org.apache.hadoop.portmap.PortmapMapping;
+
+/**
+ * Main class for starting mountd daemon. This daemon implements the NFS
+ * mount protocol. When receiving a MOUNT request from an NFS client, it checks
+ * the request against the list of currently exported file systems. If the
+ * client is permitted to mount the file system, rpc.mountd obtains a file
+ * handle for requested directory and returns it to the client.
+ */
+abstract public class MountdBase {
+  private final RpcProgram rpcProgram;
+
+  public RpcProgram getRpcProgram() {
+    return rpcProgram;
+  }
+  
+  /**
+   * Constructor
+   * @param exports
+   * @throws IOException 
+   */
+  public MountdBase(List<String> exports, RpcProgram program) throws IOException {
+    rpcProgram = program;
+  }
+
+  /* Start UDP server */
+  private void startUDPServer() {
+    rpcProgram.register(PortmapMapping.TRANSPORT_UDP);
+    SimpleUdpServer udpServer = new SimpleUdpServer(rpcProgram.getPort(),
+        rpcProgram, 1);
+    udpServer.run();
+  }
+
+  /* Start TCP server */
+  private void startTCPServer() {
+    rpcProgram.register(PortmapMapping.TRANSPORT_TCP);
+    SimpleTcpServer tcpServer = new SimpleTcpServer(rpcProgram.getPort(),
+        rpcProgram, 1);
+    tcpServer.run();
+  }
+
+  public void start() {
+    startUDPServer();
+    startTCPServer();
+  }
+}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsFileType.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsFileType.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsFileType.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsFileType.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs;
+
/**
 * The different types of files, together with their on-the-wire integer
 * values.
 */
public enum NfsFileType {
  /** A regular file */
  NFSREG(1),
  /** A directory */
  NFSDIR(2),
  /** A block special device file */
  NFSBLK(3),
  /** A character special device */
  NFSCHR(4),
  /** A symbolic link */
  NFSLNK(5),
  /** A socket */
  NFSSOCK(6),
  /** A named pipe */
  NFSFIFO(7);

  private final int value;

  NfsFileType(int value) {
    this.value = value;
  }

  /** @return the integer value of this file type */
  public int toValue() {
    return value;
  }
}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsTime.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsTime.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsTime.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsTime.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * Class that encapsulates time.
+ */
+public class NfsTime {
+  static final int MILLISECONDS_IN_SECOND = 1000;
+  static final int NANOSECONDS_IN_MILLISECOND = 1000000;
+  private final int seconds;
+  private final int nseconds;
+
+  public NfsTime(int seconds, int nseconds) {
+    this.seconds = seconds;
+    this.nseconds = nseconds;
+  }
+
+  public NfsTime(NfsTime other) {
+    seconds = other.getNseconds();
+    nseconds = other.getNseconds();
+  }
+  
+  public NfsTime(long milliseconds) {
+    seconds = (int) (milliseconds / MILLISECONDS_IN_SECOND);
+    nseconds = (int) ((milliseconds - this.seconds * MILLISECONDS_IN_SECOND) * 
+        NANOSECONDS_IN_MILLISECOND);
+  }
+
+  public int getSeconds() {
+    return seconds;
+  }
+  
+  public int getNseconds() {
+    return nseconds;
+  }
+
+  /**
+   * Get the total time in milliseconds
+   */
+  public long getMilliSeconds() {
+    return (long) (seconds) * 1000 + (long) (nseconds) / 1000000;
+  }
+
+  public void serialize(XDR xdr) {
+    xdr.writeInt(getSeconds());
+    xdr.writeInt(getNseconds());
+  }
+
+  public static NfsTime deserialize(XDR xdr) {
+    return new NfsTime(xdr.readInt(), xdr.readInt());
+  }
+
+  @Override
+  public int hashCode() {
+    return seconds ^ nseconds;
+  }
+  
+  @Override
+  public boolean equals(Object o) {
+    if (!(o instanceof NfsTime)) {
+      return false;
+    }
+    return ((NfsTime) o).getMilliSeconds() == this.getMilliSeconds();
+  }
+  
+  @Override
+  public String toString() {
+    return "(NfsTime-" + seconds + "s, " + nseconds + "ns)";
+  }
+}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3;
+
+import java.nio.ByteBuffer;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Arrays;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * This is a file handle use by the NFS clients.
+ * Server returns this handle to the client, which is used by the client
+ * on subsequent operations to reference the file.
+ */
+public class FileHandle {
+  private static final Log LOG = LogFactory.getLog(FileHandle.class);
+  private static final String HEXES = "0123456789abcdef";
+  private static final int HANDLE_LEN = 32;
+  private byte[] handle; // Opaque handle
+  private long fileId = -1;
+
+  public FileHandle() {
+    handle = null;
+  }
+
+  /**
+   * Handle is a 32 bytes number. For HDFS, the last 8 bytes is fileId.
+   */
+  public FileHandle(long v) {
+    fileId = v;
+    handle = new byte[HANDLE_LEN];
+    handle[0] = (byte)(v >>> 56);
+    handle[1] = (byte)(v >>> 48);
+    handle[2] = (byte)(v >>> 40);
+    handle[3] = (byte)(v >>> 32);
+    handle[4] = (byte)(v >>> 24);
+    handle[5] = (byte)(v >>> 16);
+    handle[6] = (byte)(v >>>  8);
+    handle[7] = (byte)(v >>>  0);
+    for (int i = 8; i < HANDLE_LEN; i++) {
+      handle[i] = (byte) 0;
+    }
+  }
+  
+  public FileHandle(String s) {
+    MessageDigest digest;
+    try {
+      digest = MessageDigest.getInstance("MD5");
+      handle = new byte[HANDLE_LEN];
+    } catch (NoSuchAlgorithmException e) {
+      LOG.warn("MD5 MessageDigest unavailable.");
+      handle = null;
+      return;
+    }
+
+    byte[] in = s.getBytes();
+    for (int i = 0; i < in.length; i++) {
+      digest.update(in[i]);
+    }
+
+    byte[] digestbytes = digest.digest();
+    for (int i = 0; i < 16; i++) {
+      handle[i] = (byte) 0;
+    }
+
+    for (int i = 16; i < 32; i++) {
+      handle[i] = digestbytes[i - 16];
+    }
+  }
+
+  public boolean serialize(XDR out) {
+    out.writeInt(handle.length);
+    out.writeFixedOpaque(handle);
+    return true;
+  }
+
+  private long bytesToLong(byte[] data) {
+    ByteBuffer buffer = ByteBuffer.allocate(8);
+    for (int i = 0; i < 8; i++) {
+      buffer.put(data[i]);
+    }
+    buffer.flip();// need flip
+    return buffer.getLong();
+  }
+  
+  public boolean deserialize(XDR xdr) {
+    if (!XDR.verifyLength(xdr, 32)) {
+      return false;
+    }
+    int size = xdr.readInt();
+    handle = xdr.readFixedOpaque(size);
+    fileId = bytesToLong(handle);
+    return true;
+  }
+  
+  private static String hex(byte b) {
+    StringBuilder strBuilder = new StringBuilder();
+    strBuilder.append(HEXES.charAt((b & 0xF0) >> 4)).append(
+        HEXES.charAt((b & 0x0F)));
+    return strBuilder.toString();
+  }
+  
+  public long getFileId() {    
+    return fileId;
+  }
+  
+  public byte[] getContent() {
+    return handle.clone();
+  }
+  
+  @Override
+  public String toString() {
+    StringBuilder s = new StringBuilder();
+    for (int i = 0; i < handle.length; i++) {
+      s.append(hex(handle[i]));
+    }
+    return s.toString();
+  }
+  
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+
+    if (!(o instanceof FileHandle)) {
+      return false;
+    }
+
+    FileHandle h = (FileHandle) o;
+    return Arrays.equals(handle, h.handle);
+  }
+
+  @Override
+  public int hashCode() {
+    return Arrays.hashCode(handle);
+  }
+}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import com.google.common.collect.BiMap;
+import com.google.common.collect.HashBiMap;
+
+/**
+ * Map id to user name or group name. It does update every 15 minutes. Only a
+ * single instance of this class is expected to be on the server.
+ */
+public class IdUserGroup {
+  static final Log LOG = LogFactory.getLog(IdUserGroup.class);
+  private final static String OS = System.getProperty("os.name");
+
+  /** Shell commands to get users and groups */
+  static final String LINUX_GET_ALL_USERS_CMD = "getent passwd | cut -d: -f1,3";
+  static final String LINUX_GET_ALL_GROUPS_CMD = "getent group | cut -d: -f1,3";
+  static final String MAC_GET_ALL_USERS_CMD = "dscl . -list /Users UniqueID";
+  static final String MAC_GET_ALL_GROUPS_CMD = "dscl . -list /Groups PrimaryGroupID";
+
+  // Do update every 15 minutes
+  final static long TIMEOUT = 15 * 60 * 1000; // ms
+
+  // Maps for id to name map. Guarded by this object monitor lock */
+  private BiMap<Integer, String> uidNameMap = HashBiMap.create();
+  private BiMap<Integer, String> gidNameMap = HashBiMap.create();
+
+  private long lastUpdateTime = 0; // Last time maps were updated
+
+  public IdUserGroup() {
+    updateMaps();
+  }
+
+  private boolean isExpired() {
+    return lastUpdateTime - System.currentTimeMillis() > TIMEOUT;
+  }
+
+  private void checkAndUpdateMaps() {
+    if (isExpired()) {
+      LOG.info("Update cache now");
+      updateMaps();
+    }
+  }
+
+  /**
+   * Get the whole list of users and groups and save them in the maps.
+   */
+  private void updateMapInternal(BiMap<Integer, String> map, String name,
+      String command, String regex) throws IOException {
+    BufferedReader br = null;
+    try {
+      Process process = Runtime.getRuntime().exec(
+          new String[] { "bash", "-c", command });
+      br = new BufferedReader(new InputStreamReader(process.getInputStream()));
+      String line = null;
+      while ((line = br.readLine()) != null) {
+        String[] nameId = line.split(regex);
+        if ((nameId == null) || (nameId.length != 2)) {
+          throw new IOException("Can't parse " + name + " list entry:" + line);
+        }
+        LOG.debug("add " + name + ":" + nameId[0] + " id:" + nameId[1]);
+        map.put(Integer.valueOf(nameId[1]), nameId[0]);
+      }
+      LOG.info("Updated " + name + " map size:" + map.size());
+      
+    } catch (IOException e) {
+      LOG.error("Can't update map " + name);
+      throw e;
+    } finally {
+      if (br != null) {
+        try {
+          br.close();
+        } catch (IOException e1) {
+          LOG.error("Can't close BufferedReader of command result");
+          e1.printStackTrace();
+        }
+      }
+    }
+  }
+
+  synchronized public void updateMaps() {
+    BiMap<Integer, String> uMap = HashBiMap.create();
+    BiMap<Integer, String> gMap = HashBiMap.create();
+
+    try {
+      if (OS.startsWith("Linux")) {
+        updateMapInternal(uMap, "user", LINUX_GET_ALL_USERS_CMD, ":");
+        updateMapInternal(gMap, "group", LINUX_GET_ALL_GROUPS_CMD, ":");
+      } else if (OS.startsWith("Mac")) {
+        updateMapInternal(uMap, "user", MAC_GET_ALL_USERS_CMD, "\\s+");
+        updateMapInternal(gMap, "group", MAC_GET_ALL_GROUPS_CMD, "\\s+");
+      } else {
+        throw new IOException("Platform is not supported:" + OS);
+      }
+    } catch (IOException e) {
+      LOG.error("Can't update maps:" + e);
+      return;
+    }
+    uidNameMap = uMap;
+    gidNameMap = gMap;
+    lastUpdateTime = System.currentTimeMillis();
+  }
+
+  synchronized public int getUid(String user) throws IOException {
+    checkAndUpdateMaps();
+
+    Integer id = uidNameMap.inverse().get(user);
+    if (id == null) {
+      throw new IOException("User just deleted?:" + user);
+    }
+    return id.intValue();
+  }
+
+  synchronized public int getGid(String group) throws IOException {
+    checkAndUpdateMaps();
+
+    Integer id = gidNameMap.inverse().get(group);
+    if (id == null) {
+      throw new IOException("No such group:" + group);
+
+    }
+    return id.intValue();
+  }
+
+  synchronized public String getUserName(int uid, String unknown) {
+    checkAndUpdateMaps();
+    String uname = uidNameMap.get(Integer.valueOf(uid));
+    if (uname == null) {
+      uname = unknown;
+    }
+    return uname;
+  }
+
+  synchronized public String getGroupName(int gid, String unknown) {
+    checkAndUpdateMaps();
+    String gname = gidNameMap.get(Integer.valueOf(gid));
+    if (gname == null) {
+      gname = unknown;
+    }
+    return gname;
+  }
+
+  // When can't map user, return user name's string hashcode
+  public int getUidAllowingUnknown(String user) {
+    checkAndUpdateMaps();
+    int uid;
+    try {
+      uid = getUid(user);
+    } catch (IOException e) {
+      uid = user.hashCode();
+      LOG.info("Can't map user " + user + ". Use its string hashcode:" + uid);
+    }
+    return uid;
+  }
+
+  // When can't map group, return group name's string hashcode
+  public int getGidAllowingUnknown(String group) {
+    checkAndUpdateMaps();
+    int gid;
+    try {
+      gid = getGid(group);
+    } catch (IOException e) {
+      gid = group.hashCode();
+      LOG.debug("Can't map group " + group + ". Use its string hashcode:" + gid);
+    }
+    return gid;
+  }
+}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.mount.MountdBase;
+import org.apache.hadoop.oncrpc.RpcFrameDecoder;
+import org.apache.hadoop.oncrpc.RpcProgram;
+import org.apache.hadoop.oncrpc.SimpleTcpServer;
+import org.apache.hadoop.oncrpc.SimpleTcpServerHandler;
+import org.apache.hadoop.portmap.PortmapMapping;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.Channels;
+
+/**
+ * Nfs server. Supports NFS v3 using {@link RpcProgram}.
+ * Currently Mountd program is also started inside this class.
+ * Only TCP server is supported and UDP is not supported.
+ */
+public abstract class Nfs3Base {
+  public static final Log LOG = LogFactory.getLog(Nfs3Base.class);
+  private final MountdBase mountd;
+  private final RpcProgram rpcProgram;
+  
+  public MountdBase getMountBase() {
+    return mountd;
+  }
+  
+  public RpcProgram getRpcProgram() {
+    return rpcProgram;
+  }
+
+  protected Nfs3Base(MountdBase mountd, RpcProgram program) {
+    this.mountd = mountd;
+    this.rpcProgram = program;
+  }
+
+  public void start() {
+    mountd.start();     // Start mountd
+    rpcProgram.register(PortmapMapping.TRANSPORT_TCP);
+    startTCPServer();   // Start TCP server
+  }
+
+  private void startTCPServer() {
+    SimpleTcpServer tcpServer = new SimpleTcpServer(Nfs3Constant.PORT,
+        rpcProgram, 0) {
+      @Override
+      public ChannelPipelineFactory getPipelineFactory() {
+        return new ChannelPipelineFactory() {
+          @Override
+          public ChannelPipeline getPipeline() {
+            return Channels.pipeline(new RpcFrameDecoder(),
+                new SimpleTcpServerHandler(rpcProgram));
+          }
+        };
+      }
+    };
+    tcpServer.run();
+  }
+}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Constant.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Constant.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Constant.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Constant.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3;
+
/**
 * Some constants for NFSv3, as defined in RFC 1813.
 */
public class Nfs3Constant {
  // The local rpcbind/portmapper port.
  public final static int SUN_RPCBIND = 111;

  // The IP port number for NFS.
  public final static int PORT = 2049;

  // The RPC program number for NFS.
  public final static int PROGRAM = 100003;

  // The program version number that this server implements.
  public final static int VERSION = 3;
  
  // The procedures
  public final static int NFSPROC3_NULL = 0;
  public final static int NFSPROC3_GETATTR = 1;
  public final static int NFSPROC3_SETATTR = 2;
  public final static int NFSPROC3_LOOKUP = 3;
  public final static int NFSPROC3_ACCESS = 4;
  public final static int NFSPROC3_READLINK = 5;
  public final static int NFSPROC3_READ = 6;
  public final static int NFSPROC3_WRITE = 7;
  public final static int NFSPROC3_CREATE = 8;
  public final static int NFSPROC3_MKDIR = 9;
  public final static int NFSPROC3_SYMLINK = 10;
  public final static int NFSPROC3_MKNOD = 11;
  public final static int NFSPROC3_REMOVE = 12;
  public final static int NFSPROC3_RMDIR = 13;
  public final static int NFSPROC3_RENAME = 14;
  public final static int NFSPROC3_LINK = 15;
  public final static int NFSPROC3_READDIR = 16;
  public final static int NFSPROC3_READDIRPLUS = 17;
  public final static int NFSPROC3_FSSTAT = 18;
  public final static int NFSPROC3_FSINFO = 19;
  public final static int NFSPROC3_PATHCONF = 20;
  public final static int NFSPROC3_COMMIT = 21;
  
  // The maximum size in bytes of the opaque file handle.
  public final static int NFS3_FHSIZE = 64;

  // The byte size of cookie verifier passed by READDIR and READDIRPLUS.
  public final static int NFS3_COOKIEVERFSIZE = 8;

  // The size in bytes of the opaque verifier used for exclusive CREATE.
  public final static int NFS3_CREATEVERFSIZE = 8;

  // The size in bytes of the opaque verifier used for asynchronous WRITE.
  public final static int NFS3_WRITEVERFSIZE = 8;

  /** Access call request mode */
  // File access mode
  public static final int ACCESS_MODE_READ = 0x04;
  public static final int ACCESS_MODE_WRITE = 0x02;
  public static final int ACCESS_MODE_EXECUTE = 0x01;

  /** Access call response rights */
  // Read data from file or read a directory.
  public final static int ACCESS3_READ = 0x0001;
  // Look up a name in a directory (no meaning for non-directory objects).
  public final static int ACCESS3_LOOKUP = 0x0002;
  // Rewrite existing file data or modify existing directory entries.
  public final static int ACCESS3_MODIFY = 0x0004;
  // Write new data or add directory entries.
  public final static int ACCESS3_EXTEND = 0x0008;
  // Delete an existing directory entry.
  public final static int ACCESS3_DELETE = 0x0010;
  // Execute file (no meaning for a directory).
  public final static int ACCESS3_EXECUTE = 0x0020;

  /** File and directory attribute mode bits */
  // Set user ID on execution.
  public final static int MODE_S_ISUID = 0x00800;
  // Set group ID on execution.
  public final static int MODE_S_ISGID = 0x00400;
  // Save swapped text (not defined in POSIX).
  public final static int MODE_S_ISVTX = 0x00200;
  // Read permission for owner.
  public final static int MODE_S_IRUSR = 0x00100;
  // Write permission for owner.
  public final static int MODE_S_IWUSR = 0x00080;
  // Execute permission for owner on a file. Or lookup (search) permission for
  // owner in directory.
  public final static int MODE_S_IXUSR = 0x00040;
  // Read permission for group.
  public final static int MODE_S_IRGRP = 0x00020;
  // Write permission for group.
  public final static int MODE_S_IWGRP = 0x00010;
  // Execute permission for group on a file. Or lookup (search) permission for
  // group in directory.
  public final static int MODE_S_IXGRP = 0x00008;
  // Read permission for others.
  public final static int MODE_S_IROTH = 0x00004;
  // Write permission for others.
  public final static int MODE_S_IWOTH = 0x00002;
  // Execute permission for others on a file. Or lookup (search) permission for
  // others in directory.
  public final static int MODE_S_IXOTH = 0x00001;

  // All mode bits combined (0xFFF). FIX: the original expression listed
  // MODE_S_ISVTX and MODE_S_IRUSR twice; OR is idempotent so the value was
  // unchanged, but the duplicates were misleading.
  public final static int MODE_ALL = MODE_S_ISUID | MODE_S_ISGID | MODE_S_ISVTX
      | MODE_S_IRUSR | MODE_S_IWUSR | MODE_S_IXUSR | MODE_S_IRGRP
      | MODE_S_IWGRP | MODE_S_IXGRP | MODE_S_IROTH | MODE_S_IWOTH
      | MODE_S_IXOTH;

  /** Write call flavors */
  public enum WriteStableHow {
    UNSTABLE(0), DATA_SYNC(1), FILE_SYNC(2);

    private final int id;

    WriteStableHow(int id) {
      this.id = id;
    }

    /** @return the on-the-wire integer value of this flavor */
    public int getValue() {
      return id;
    }

    /**
     * @param id on-the-wire value, must be 0..2
     * @return the corresponding flavor
     */
    public static WriteStableHow fromValue(int id) {
      return values()[id];
    }
  }

  /**
   * This is a cookie that the client can use to determine whether the server
   * has changed state between a call to WRITE and a subsequent call to either
   * WRITE or COMMIT. This cookie must be consistent during a single instance of
   * the NFS version 3 protocol service and must be unique between instances of
   * the NFS version 3 protocol server, where uncommitted data may be lost.
   */
  public final static long WRITE_COMMIT_VERF = System.currentTimeMillis();
  
  /** FileSystemProperties */
  public final static int FSF3_LINK = 0x0001;
  public final static int FSF3_SYMLINK = 0x0002;
  public final static int FSF3_HOMOGENEOUS = 0x0008;
  public final static int FSF3_CANSETTIME = 0x0010;

  /** Create options */
  public final static int CREATE_UNCHECKED = 0;
  public final static int CREATE_GUARDED = 1;
  public final static int CREATE_EXCLUSIVE = 2;
}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3FileAttributes.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3FileAttributes.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3FileAttributes.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3FileAttributes.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,217 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3;
+
+import org.apache.hadoop.nfs.NfsFileType;
+import org.apache.hadoop.nfs.NfsTime;
+import org.apache.hadoop.nfs.nfs3.response.WccAttr;
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * File attrbutes reported in NFS.
+ */
+public class Nfs3FileAttributes {
+  private int type;
+  private int mode;
+  private int nlink;
+  private int uid;
+  private int gid;
+  private long size;
+  private long used;
+  private Specdata3 rdev;
+  private long fsid;
+  private long fileid;
+  private NfsTime atime;
+  private NfsTime mtime;
+  private NfsTime ctime;
+
+  /*
+   * The interpretation of the two words depends on the type of file system
+   * object. For a block special (NF3BLK) or character special (NF3CHR) file,
+   * specdata1 and specdata2 are the major and minor device numbers,
+   * respectively. (This is obviously a UNIX-specific interpretation.) For all
+   * other file types, these two elements should either be set to 0 or the
+   * values should be agreed upon by the client and server. If the client and
+   * server do not agree upon the values, the client should treat these fields
+   * as if they are set to 0.
+   * <br>
+   * For Hadoop, currently this field is always zero.
+   */
+  public static class Specdata3 {
+    final static int specdata1 = 0;
+    final static int specdata2 = 0;
+
+    public int getSpecdata1() {
+      return specdata1;
+    }
+
+    public int getSpecdata2() {
+      return specdata2;
+    }
+    
+    @Override
+    public String toString() {
+      return "(Specdata3: specdata1" + specdata1 + ", specdata2:" + specdata2
+          + ")";
+    }
+  }
+   
+  public Nfs3FileAttributes() {
+    this(false, 0, (short)0, 0, 0, 0, 0, 0, 0, 0);
+  }
+
+  public Nfs3FileAttributes(boolean isDir, int nlink, short mode, int uid,
+      int gid, long size, long fsid, long fileid, long mtime, long atime) {
+    this.type = isDir ? NfsFileType.NFSDIR.toValue() : NfsFileType.NFSREG
+        .toValue();
+    this.mode = mode;
+    this.nlink = isDir ? (nlink + 2) : 1;
+    this.uid = uid;
+    this.gid = gid;
+    this.size = size;
+    if(isDir) {
+      this.size = getDirSize(nlink);
+    }
+    this.used = this.size;
+    this.rdev = new Specdata3();
+    this.fsid = fsid;
+    this.fileid = fileid;
+    this.mtime = new NfsTime(mtime);
+    this.atime = atime != 0 ? new NfsTime(atime) : this.mtime;
+    this.ctime = this.mtime;
+  }
+  
+  public void serialize(XDR xdr) {
+    xdr.writeInt(type);
+    xdr.writeInt(mode);
+    xdr.writeInt(nlink);
+    xdr.writeInt(uid);
+    xdr.writeInt(gid);
+    xdr.writeLongAsHyper(size);
+    xdr.writeLongAsHyper(used);
+    xdr.writeInt(rdev.getSpecdata1());
+    xdr.writeInt(rdev.getSpecdata2());
+    xdr.writeLongAsHyper(fsid);
+    xdr.writeLongAsHyper(fileid);
+    atime.serialize(xdr);
+    mtime.serialize(xdr);
+    ctime.serialize(xdr);
+  }
+  
+  public static Nfs3FileAttributes deserialize(XDR xdr) {
+    Nfs3FileAttributes attr = new Nfs3FileAttributes();
+    attr.type = xdr.readInt();
+    attr.mode = xdr.readInt();
+    attr.nlink = xdr.readInt();
+    attr.uid = xdr.readInt();
+    attr.gid = xdr.readInt();
+    attr.size = xdr.readHyper();
+    attr.used = xdr.readHyper();
+    // Ignore rdev
+    xdr.readInt();
+    xdr.readInt();
+    attr.rdev = new Specdata3();
+    attr.fsid = xdr.readHyper();
+    attr.fileid = xdr.readHyper();
+    attr.atime = NfsTime.deserialize(xdr);
+    attr.mtime = NfsTime.deserialize(xdr);
+    attr.ctime = NfsTime.deserialize(xdr);
+    return attr;
+  }
+  
+  @Override
+  public String toString() {
+    return String.format("type:%d, mode:%d, nlink:%d, uid:%d, gid:%d, " + 
+            "size:%d, used:%d, rdev:%s, fsid:%d, fileid:%d, atime:%s, " + 
+            "mtime:%s, ctime:%s",
+            type, mode, nlink, uid, gid, size, used, rdev, fsid, fileid, atime,
+            mtime, ctime);
+  }
+
+  public int getNlink() {
+    return nlink;
+  }
+
+  public long getUsed() {
+    return used;
+  }
+
+  public long getFsid() {
+    return fsid;
+  }
+
+  public long getFileid() {
+    return fileid;
+  }
+
+  public NfsTime getAtime() {
+    return atime;
+  }
+
+  public NfsTime getMtime() {
+    return mtime;
+  }
+
+  public NfsTime getCtime() {
+    return ctime;
+  }
+
+  public int getType() {
+    return type;
+  }
+  
+  public WccAttr getWccAttr() {
+    return new WccAttr(size, mtime, ctime);
+  }
+  
+  public long getFileId() {
+    return fileid;
+  }
+  
+  public long getSize() {
+    return size;
+  }
+  
+  public void setSize(long size) {
+    this.size = size;
+  }
+  
+  public void setUsed(long used) {
+    this.used = used;
+  }
+  
+  public int getMode() {
+    return this.mode;
+  }
+  
+  public int getUid() {
+    return this.uid;
+  }
+  
+  public int getGid() {
+    return this.gid;
+  }
+  
+  /**
+   * HDFS directory size is always zero. Try to return something meaningful
+   * here. Assume each child take 32bytes.
+   */
+  public static long getDirSize(int childNum) {
+    return (childNum + 2) * 32;
+  }
+}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Interface.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Interface.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Interface.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Interface.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3;
+
+import org.apache.hadoop.nfs.nfs3.response.NFS3Response;
+import org.apache.hadoop.oncrpc.RpcAuthSys;
+import org.apache.hadoop.oncrpc.XDR;
+import org.jboss.netty.channel.Channel;
+
/**
 * RPC procedures as defined in RFC 1813. Each method decodes its arguments
 * from the supplied {@code XDR} stream and returns the procedure's response;
 * {@code authSys} carries the caller's AUTH_SYS credentials.
 */
public interface Nfs3Interface {
  
  /** NULL: Do nothing */
  public NFS3Response nullProcedure();
  
  /** GETATTR: Get file attributes */
  public NFS3Response getattr(XDR xdr, RpcAuthSys authSys);
  
  /** SETATTR: Set file attributes */
  public NFS3Response setattr(XDR xdr, RpcAuthSys authSys);
  
  /** LOOKUP: Lookup filename */
  public NFS3Response lookup(XDR xdr, RpcAuthSys authSys);
  
  /** ACCESS: Check access permission  */
  public NFS3Response access(XDR xdr, RpcAuthSys authSys);
  
  /** READ: Read from file */
  public NFS3Response read(XDR xdr, RpcAuthSys authSys);
  
  /**
   * WRITE: Write to file. Also takes the Netty channel and RPC transaction id
   * (xid), presumably so the reply can be sent asynchronously — confirm
   * against the implementation.
   */
  public NFS3Response write(XDR xdr, Channel channel, int xid, RpcAuthSys authSys);
  
  /** CREATE: Create a file  */
  public NFS3Response create(XDR xdr, RpcAuthSys authSys);
  
  /** MKDIR: Create a directory  */
  public NFS3Response mkdir(XDR xdr, RpcAuthSys authSys);
  
  /** REMOVE: Remove a file  */
  public NFS3Response remove(XDR xdr, RpcAuthSys authSys);
  
  /** RMDIR: Remove a directory  */
  public NFS3Response rmdir(XDR xdr, RpcAuthSys authSys);
  
  /** RENAME: Rename a file or directory */
  public NFS3Response rename(XDR xdr, RpcAuthSys authSys);
  
  /** SYMLINK: Create a symbolic link  */
  public NFS3Response symlink(XDR xdr, RpcAuthSys authSys);
  
  /** READDIR: Read From directory */
  public NFS3Response readdir(XDR xdr, RpcAuthSys authSys);
  
  /** FSSTAT: Get dynamic file system information  */
  public NFS3Response fsstat(XDR xdr, RpcAuthSys authSys);
  
  /** FSINFO: Get static file system information */
  public NFS3Response fsinfo(XDR xdr, RpcAuthSys authSys);
  
  /** PATHCONF: Retrieve POSIX information */
  public NFS3Response pathconf(XDR xdr, RpcAuthSys authSys);
  
  /** COMMIT: Commit cached data on a server to stable storage  */
  public NFS3Response commit(XDR xdr, RpcAuthSys authSys);
}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Status.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Status.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Status.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Status.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3;
+
/**
 * Success or error status is reported in NFS3 responses (RFC 1813 nfsstat3).
 */
public class Nfs3Status {
  
  /** Indicates the call completed successfully. */
  public final static int NFS3_OK = 0;
  
  /**
   * The operation was not allowed because the caller is either not a
   * privileged user (root) or not the owner of the target of the operation.
   */
  public final static int NFS3ERR_PERM = 1;
  
  /**
   * No such file or directory. The file or directory name specified does not
   * exist.
   */
  public final static int NFS3ERR_NOENT = 2;
  
  /**
   * I/O error. A hard error (for example, a disk error) occurred while
   * processing the requested operation.
   */
  public final static int NFS3ERR_IO = 5;
  
  /** I/O error. No such device or address. */
  public final static int NFS3ERR_NXIO = 6;
  
  /**
   * Permission denied. The caller does not have the correct permission to
   * perform the requested operation. Contrast this with NFS3ERR_PERM, which
   * restricts itself to owner or privileged user permission failures.
   */
  public final static int NFS3ERR_ACCES = 13;
  
  /** File exists. The file specified already exists. */
  public final static int NFS3ERR_EXIST = 17;
  
  /** Attempt to do a cross-device hard link. */
  public final static int NFS3ERR_XDEV = 18;
  
  /** No such device. */
  public final static int NFS3ERR_NODEV = 19;
  
  /**
   * The caller specified a non-directory in a directory operation.
   * FIX: declared final like every other status constant; it was previously
   * mutable by accident.
   */
  public final static int NFS3ERR_NOTDIR = 20;
  
  /** The caller specified a directory in a non-directory operation. */
  public final static int NFS3ERR_ISDIR = 21;
  
  /**
   * Invalid argument or unsupported argument for an operation. Two examples are
   * attempting a READLINK on an object other than a symbolic link or attempting
   * to SETATTR a time field on a server that does not support this operation.
   */
  public final static int NFS3ERR_INVAL = 22;
  
  /**
   * File too large. The operation would have caused a file to grow beyond the
   * server's limit.
   */
  public final static int NFS3ERR_FBIG = 27;
  
  /**
   * No space left on device. The operation would have caused the server's file
   * system to exceed its limit.
   */
  public final static int NFS3ERR_NOSPC = 28;
  
  /**
   * Read-only file system. A modifying operation was attempted on a read-only
   * file system.
   */
  public final static int NFS3ERR_ROFS = 30;
  
  /** Too many hard links. */
  public final static int NFS3ERR_MLINK = 31;
  
  /** The filename in an operation was too long. */
  public final static int NFS3ERR_NAMETOOLONG = 63;
  
  /** An attempt was made to remove a directory that was not empty. */
  public final static int NFS3ERR_NOTEMPTY = 66;
  
  /**
   * Resource (quota) hard limit exceeded. The user's resource limit on the
   * server has been exceeded.
   */
  public final static int NFS3ERR_DQUOT = 69;
  
  /**
   * The file handle given in the arguments was invalid. The file referred to by
   * that file handle no longer exists or access to it has been revoked.
   */
  public final static int NFS3ERR_STALE = 70;
  
  /**
   * The file handle given in the arguments referred to a file on a non-local
   * file system on the server.
   */
  public final static int NFS3ERR_REMOTE = 71;
  
  /** The file handle failed internal consistency checks */
  public final static int NFS3ERR_BADHANDLE = 10001;
  
  /**
   * Update synchronization mismatch was detected during a SETATTR operation.
   */
  public final static int NFS3ERR_NOT_SYNC = 10002;
  
  /** READDIR or READDIRPLUS cookie is stale */
  public final static int NFS3ERR_BAD_COOKIE = 10003;
  
  /** Operation is not supported */
  public final static int NFS3ERR_NOTSUPP = 10004;
  
  /** Buffer or request is too small */
  public final static int NFS3ERR_TOOSMALL = 10005;
  
  /**
   * An error occurred on the server which does not map to any of the legal NFS
   * version 3 protocol error values. The client should translate this into an
   * appropriate error. UNIX clients may choose to translate this to EIO.
   */
  public final static int NFS3ERR_SERVERFAULT = 10006;
  
  /**
   * An attempt was made to create an object of a type not supported by the
   * server.
   */
  public final static int NFS3ERR_BADTYPE = 10007;
  
  /**
   * The server initiated the request, but was not able to complete it in a
   * timely fashion. The client should wait and then try the request with a new
   * RPC transaction ID. For example, this error should be returned from a
   * server that supports hierarchical storage and receives a request to process
   * a file that has been migrated. In this case, the server should start the
   * immigration process and respond to client with this error.
   */
  public final static int NFS3ERR_JUKEBOX = 10008;
}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/ACCESS3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/ACCESS3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/ACCESS3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/ACCESS3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * ACCESS3 Request
+ */
+public class ACCESS3Request extends RequestWithHandle {
+  /**
+   * Deserialize an ACCESS3 request from the XDR stream. ACCESS3 carries no
+   * arguments beyond the file handle, which is parsed by the
+   * RequestWithHandle superclass.
+   *
+   * @throws IOException if the request cannot be read from the stream
+   */
+  public ACCESS3Request(XDR xdr) throws IOException {
+    super(xdr);
+  }
+}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/COMMIT3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/COMMIT3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/COMMIT3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/COMMIT3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * COMMIT3 Request
+ */
+public class COMMIT3Request extends RequestWithHandle {
+  // Byte offset within the file at which the range to commit starts.
+  private final long offset;
+  // Number of bytes to commit, starting at offset.
+  private final int count;
+
+  /**
+   * Deserialize a COMMIT3 request: file handle (read by the superclass),
+   * then offset and count.
+   */
+  public COMMIT3Request(XDR xdr) throws IOException {
+    super(xdr);
+    this.offset = xdr.readHyper();
+    this.count = xdr.readInt();
+  }
+
+  /** @return the starting offset of the range to commit */
+  public long getOffset() {
+    return offset;
+  }
+
+  /** @return the number of bytes to commit */
+  public int getCount() {
+    return count;
+  }
+}
\ No newline at end of file

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.nfs.nfs3.FileHandle;
+import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * CREATE3 Request
+ */
+public class CREATE3Request extends RequestWithHandle {
+  private final String name;
+  private final int mode;
+  private SetAttr3 objAttr = null;
+  private long verf;
+
+  public CREATE3Request(FileHandle handle, String name, int mode,
+      SetAttr3 objAttr, long verf) {
+    super(handle);
+    this.name = name;
+    this.mode = mode;
+    this.objAttr = objAttr;
+    this.verf = verf;
+  }
+  
+  public CREATE3Request(XDR xdr) throws IOException {
+    super(xdr);
+    name = xdr.readString();
+    mode = xdr.readInt();
+
+    objAttr = new SetAttr3();
+    if ((mode == Nfs3Constant.CREATE_UNCHECKED)
+        || (mode == Nfs3Constant.CREATE_GUARDED)) {
+      objAttr.deserialize(xdr);
+    } else if (mode == Nfs3Constant.CREATE_EXCLUSIVE) {
+      verf = xdr.readHyper();
+    } else {
+      throw new IOException("Wrong create mode:" + mode);
+    }
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public int getMode() {
+    return mode;
+  }
+
+  public SetAttr3 getObjAttr() {
+    return objAttr;
+  }
+
+  public long getVerf() {
+    return verf;
+  }
+  
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
+    xdr.writeInt(name.length());
+    xdr.writeFixedOpaque(name.getBytes(), name.length());
+    xdr.writeInt(mode);
+    objAttr.serialize(xdr);
+  }
+}
\ No newline at end of file

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/FSINFO3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/FSINFO3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/FSINFO3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/FSINFO3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * FSINFO3 Request
+ */
+public class FSINFO3Request extends RequestWithHandle {
+  /**
+   * Deserialize an FSINFO3 request. FSINFO3 takes only a file handle,
+   * which is parsed by the RequestWithHandle superclass.
+   *
+   * @throws IOException if the request cannot be read from the stream
+   */
+  public FSINFO3Request(XDR xdr) throws IOException {
+    super(xdr);
+  }
+}
\ No newline at end of file

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/FSSTAT3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/FSSTAT3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/FSSTAT3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/FSSTAT3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * FSSTAT3 Request
+ */
+public class FSSTAT3Request extends RequestWithHandle {
+  /**
+   * Deserialize an FSSTAT3 request. FSSTAT3 takes only a file handle,
+   * which is parsed by the RequestWithHandle superclass.
+   *
+   * @throws IOException if the request cannot be read from the stream
+   */
+  public FSSTAT3Request(XDR xdr) throws IOException {
+    super(xdr);
+  }
+}
\ No newline at end of file

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/GETATTR3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/GETATTR3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/GETATTR3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/GETATTR3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * GETATTR3 Request
+ */
+public class GETATTR3Request extends RequestWithHandle {
+  /**
+   * Deserialize a GETATTR3 request. GETATTR3 takes only a file handle,
+   * which is parsed by the RequestWithHandle superclass.
+   *
+   * @throws IOException if the request cannot be read from the stream
+   */
+  public GETATTR3Request(XDR xdr) throws IOException {
+    super(xdr);
+  }
+}
\ No newline at end of file

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.nfs.nfs3.FileHandle;
+import org.apache.hadoop.oncrpc.XDR;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * LOOKUP3 Request
+ */
+public class LOOKUP3Request extends RequestWithHandle {
+  private String name;
+
+  public LOOKUP3Request(FileHandle handle, String name) {
+    super(handle);
+    this.name = name;
+  }
+  
+  public LOOKUP3Request(XDR xdr) throws IOException {
+    super(xdr);
+    name = xdr.readString();
+  }
+
+  public String getName() {
+    return this.name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  @Override
+  @VisibleForTesting
+  public void serialize(XDR xdr) {
+    super.serialize(xdr);
+    xdr.writeInt(name.getBytes().length);
+    xdr.writeFixedOpaque(name.getBytes());
+  }
+}
\ No newline at end of file

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * MKDIR3 Request
+ */
+public class MKDIR3Request extends RequestWithHandle {
+  // Name of the directory to create under the parent identified by handle.
+  private final String name;
+  // Requested initial attributes for the new directory.
+  private final SetAttr3 objAttr;
+
+  /**
+   * Deserialize a MKDIR3 request: parent handle (read by the superclass),
+   * directory name, then the requested attributes.
+   */
+  public MKDIR3Request(XDR xdr) throws IOException {
+    super(xdr);
+    this.name = xdr.readString();
+    this.objAttr = new SetAttr3();
+    this.objAttr.deserialize(xdr);
+  }
+
+  /** @return the name of the directory to create */
+  public String getName() {
+    return this.name;
+  }
+
+  /** @return the requested initial attributes */
+  public SetAttr3 getObjAttr() {
+    return this.objAttr;
+  }
+}
\ No newline at end of file

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/PATHCONF3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/PATHCONF3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/PATHCONF3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/PATHCONF3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * PATHCONF3 Request
+ */
+public class PATHCONF3Request extends RequestWithHandle {
+  /**
+   * Deserialize a PATHCONF3 request. PATHCONF3 takes only a file handle,
+   * which is parsed by the RequestWithHandle superclass.
+   *
+   * @throws IOException if the request cannot be read from the stream
+   */
+  public PATHCONF3Request(XDR xdr) throws IOException {
+    super(xdr);
+  }
+}

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READ3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READ3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READ3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READ3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * READ3 Request
+ */
+public class READ3Request extends RequestWithHandle {
+  // Position in the file at which the read starts.
+  private final long offset;
+  // Maximum number of bytes to read.
+  private final int count;
+
+  /**
+   * Deserialize a READ3 request: file handle (read by the superclass),
+   * then offset and count.
+   */
+  public READ3Request(XDR xdr) throws IOException {
+    super(xdr);
+    this.offset = xdr.readHyper();
+    this.count = xdr.readInt();
+  }
+
+  /** @return the starting offset of the read */
+  public long getOffset() {
+    return offset;
+  }
+
+  /** @return the maximum number of bytes to read */
+  public int getCount() {
+    return count;
+  }
+}
\ No newline at end of file

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READDIR3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READDIR3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READDIR3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READDIR3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * READDIR3 Request
+ */
+public class READDIR3Request extends RequestWithHandle {
+  // Opaque position cookie from a previous READDIR reply; 0 starts over.
+  private final long cookie;
+  // Verifier paired with the cookie, used to detect stale cookies.
+  private final long cookieVerf;
+  // Maximum size, in bytes, of the READDIR3 reply (see RFC 1813).
+  private final int count;
+
+  /**
+   * Deserialize a READDIR3 request: directory handle (read by the
+   * superclass), cookie, cookie verifier, and reply size limit.
+   */
+  public READDIR3Request(XDR xdr) throws IOException {
+    super(xdr);
+    cookie = xdr.readHyper();
+    cookieVerf = xdr.readHyper();
+    count = xdr.readInt();
+  }
+
+  public long getCookie() {
+    return this.cookie;
+  }
+
+  public long getCookieVerf() {
+    return this.cookieVerf;
+  }
+
+  // Return type corrected from long to int: the field is an int read with
+  // readInt(), and the sibling request classes (READ3, COMMIT3,
+  // READDIRPLUS3) all expose their counts as int. Callers assigning the
+  // result to a long still compile via widening conversion.
+  public int getCount() {
+    return this.count;
+  }
+}
\ No newline at end of file

Added: hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READDIRPLUS3Request.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READDIRPLUS3Request.java?rev=1493924&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READDIRPLUS3Request.java (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/READDIRPLUS3Request.java Mon Jun 17 20:32:13 2013
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * READDIRPLUS3 Request
+ */
+public class READDIRPLUS3Request extends RequestWithHandle {
+  // Opaque position cookie from a previous READDIRPLUS reply; 0 starts over.
+  private final long cookie;
+  // Verifier paired with the cookie, used to detect stale cookies.
+  private final long cookieVerf;
+  // Limit, in bytes, on the directory information in the reply (RFC 1813).
+  private final int dirCount;
+  // Limit, in bytes, on the total size of the reply (RFC 1813).
+  private final int maxCount;
+
+  /**
+   * Deserialize a READDIRPLUS3 request: directory handle (read by the
+   * superclass), cookie, cookie verifier, and the two reply size limits.
+   */
+  public READDIRPLUS3Request(XDR xdr) throws IOException {
+    super(xdr);
+    this.cookie = xdr.readHyper();
+    this.cookieVerf = xdr.readHyper();
+    this.dirCount = xdr.readInt();
+    this.maxCount = xdr.readInt();
+  }
+
+  public long getCookie() {
+    return cookie;
+  }
+
+  public long getCookieVerf() {
+    return cookieVerf;
+  }
+
+  public int getDirCount() {
+    return dirCount;
+  }
+
+  public int getMaxCount() {
+    return maxCount;
+  }
+}
\ No newline at end of file



Mime
View raw message