From: todd@apache.org
To: hdfs-commits@hadoop.apache.org
Subject: svn commit: r1152295 [10/10] - in /hadoop/common/trunk/hdfs: ./ bin/ ivy/ src/docs/src/documentation/content/xdocs/ src/java/ src/java/org/apache/hadoop/hdfs/ src/java/org/apache/hadoop/hdfs/protocol/ src/java/org/apache/hadoop/hdfs/server/common/ src/...
Date: Fri, 29 Jul 2011 16:28:51 -0000
Message-Id: <20110729162856.A3BD82388ABC@eris.apache.org>

Modified: hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/tools/offlineEditsViewer/editsStored.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/tools/offlineEditsViewer/editsStored.xml?rev=1152295&r1=1152294&r2=1152295&view=diff
==============================================================================
--- hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/tools/offlineEditsViewer/editsStored.xml (original)
+++ hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/tools/offlineEditsViewer/editsStored.xml Fri Jul 29 16:28:45 2011
@@ -1,428 +1,498 @@

[The XML markup of this hunk was stripped in transit, so the diff cannot be reproduced element for element; only the element values survive. The recoverable changes to the regenerated offline-edits-viewer test fixture are: the edits log version moves from -24 to -38; each record gains what appear to be a transaction ID and a per-record checksum; creation and modification timestamps are refreshed; the test user changes from "steffl" to "todd"; client names change from the DFSClient_471171074 form to DFSClient_NONMAPREDUCE_-66857152_1; generation stamps, block IDs, and delegation-token fields are regenerated; and new records (opcode values 24, 23, and 35 appear among the additions) are inserted before the closing -1 end-of-log marker.]

Modified: hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java?rev=1152295&r1=1152294&r2=1152295&view=diff
==============================================================================
--- hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java (original)
+++ hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java Fri Jul 29 16:28:45 2011
@@ -28,7 +28,6 @@ import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.net.URI;
 import java.util.HashMap;
 import java.util.Set;
 
@@ -41,6 +40,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction;
+import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 
 /**
@@ -128,11 +128,10 @@ public class TestOfflineImageViewer exte
       cluster.getNameNode().saveNamespace();
 
       // Determine location of fsimage file
-      URI [] files = cluster.getNameDirs(0).toArray(new URI[0]);
-      orig = new File(files[0].getPath(), "current/fsimage");
-
-      if (!orig.exists()) {
-        fail("Didn't generate or can't find fsimage.");
+      orig = FSImageTestUtil.findLatestImageFile(
+          cluster.getNameNode().getFSImage().getStorage().getStorageDir(0));
+      if (orig == null) {
+        fail("Didn't generate or can't find fsimage");
       }
     } finally {
       if(cluster != null)
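FSImageTestUtil itself is added in an earlier part of this commit and does not appear in this mail; the test above only needs findLatestImageFile. The following is a minimal sketch of what such a helper does, assuming the transaction-ID-based naming this commit introduces (checkpoints written as fsimage_<txid>); the parameter type and naming pattern here are assumptions for illustration, not the committed code.

// Sketch only: pick the checkpoint with the highest transaction ID out of a
// storage directory, assuming image files are named fsimage_<txid>.
import java.io.File;

class FindLatestImageSketch {
  static File findLatestImageFile(File currentDir) {
    File best = null;
    long bestTxId = -1;
    File[] files = currentDir.listFiles();
    if (files == null) {
      return null; // directory missing or unreadable
    }
    for (File f : files) {
      String name = f.getName();
      if (name.matches("fsimage_\\d+")) {
        long txid = Long.parseLong(name.substring("fsimage_".length()));
        if (txid > bestTxId) {
          bestTxId = txid;
          best = f;
        }
      }
    }
    return best; // null when no checkpoint has been written yet
  }
}

Returning null rather than throwing is what lets the test above turn "no image found" into a single fail() call.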
Added: hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java?rev=1152295&view=auto
==============================================================================
--- hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java (added)
+++ hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java Fri Jul 29 16:28:45 2011
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.util;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.aspectj.util.FileUtil;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.base.Joiner;
+
+public class TestAtomicFileOutputStream {
+
+  private static final String TEST_STRING = "hello world";
+  private static final String TEST_STRING_2 = "goodbye world";
+
+  private static File BASE_DIR = new File(
+      System.getProperty("test.build.data", "build/test/data"));
+  private static File TEST_DIR = new File(BASE_DIR,
+      TestAtomicFileOutputStream.class.getName());
+
+  private static File DST_FILE = new File(TEST_DIR, "test.txt");
+
+  @Before
+  public void cleanupTestDir() throws IOException {
+    assertTrue(TEST_DIR.exists() || TEST_DIR.mkdirs());
+    FileUtil.deleteContents(TEST_DIR);
+  }
+
+  /**
+   * Test case where there is no existing file
+   */
+  @Test
+  public void testWriteNewFile() throws IOException {
+    OutputStream fos = new AtomicFileOutputStream(DST_FILE);
+    assertFalse(DST_FILE.exists());
+    fos.write(TEST_STRING.getBytes());
+    fos.flush();
+    assertFalse(DST_FILE.exists());
+    fos.close();
+    assertTrue(DST_FILE.exists());
+
+    String readBackData = DFSTestUtil.readFile(DST_FILE);
+    assertEquals(TEST_STRING, readBackData);
+  }
+
+  /**
+   * Test case where the destination file already exists
+   */
+  @Test
+  public void testOverwriteFile() throws IOException {
+    assertTrue("Creating empty dst file", DST_FILE.createNewFile());
+
+    OutputStream fos = new AtomicFileOutputStream(DST_FILE);
+
+    assertTrue("Empty file still exists", DST_FILE.exists());
+    fos.write(TEST_STRING.getBytes());
+    fos.flush();
+
+    // Original contents still in place
+    assertEquals("", DFSTestUtil.readFile(DST_FILE));
+
+    fos.close();
+
+    // New contents replace original file
+    String readBackData = DFSTestUtil.readFile(DST_FILE);
+    assertEquals(TEST_STRING, readBackData);
+  }
+
+  /**
+   * Test case where the flush() fails at close time - make sure
+   * that we clean up after ourselves and don't touch any
+   * existing file at the destination
+   */
+  @Test
+  public void testFailToFlush() throws IOException {
+    // Create a file at destination
+    FileOutputStream fos = new FileOutputStream(DST_FILE);
+    fos.write(TEST_STRING_2.getBytes());
+    fos.close();
+
+    OutputStream failingStream = createFailingStream();
+    failingStream.write(TEST_STRING.getBytes());
+    try {
+      failingStream.close();
+      fail("Close didn't throw exception");
+    } catch (IOException ioe) {
+      // expected
+    }
+
+    // Should not have touched original file
+    assertEquals(TEST_STRING_2, DFSTestUtil.readFile(DST_FILE));
+
+    assertEquals("Temporary file should have been cleaned up",
+        DST_FILE.getName(), Joiner.on(",").join(TEST_DIR.list()));
+  }
+
+  /**
+   * Create a stream that fails to flush at close time
+   */
+  private OutputStream createFailingStream() throws FileNotFoundException {
+    return new AtomicFileOutputStream(DST_FILE) {
+      @Override
+      public void flush() throws IOException {
+        throw new IOException("injected failure");
+      }
+    };
+  }
+}
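AtomicFileOutputStream itself is added in an earlier part of this commit and does not appear in this mail. For context, here is a minimal sketch of the contract the tests above exercise, assuming a write-to-temp-then-rename design; the class name suffix and internal details are illustrative assumptions, not the committed implementation.

// Sketch only: writes are buffered in <dst>.tmp, and the destination is
// replaced only on a successful close(), so a reader never sees a
// half-written file and a failed close leaves the original untouched.
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;

class AtomicFileOutputStreamSketch extends FilterOutputStream {
  private static final String TMP_EXTENSION = ".tmp";
  private final File origFile;
  private final File tmpFile;

  public AtomicFileOutputStreamSketch(File f) throws FileNotFoundException {
    // The temp file must be opened in the super() call itself.
    super(new FileOutputStream(
        new File(f.getParentFile(), f.getName() + TMP_EXTENSION)));
    origFile = f.getAbsoluteFile();
    tmpFile = new File(f.getParentFile(),
        f.getName() + TMP_EXTENSION).getAbsoluteFile();
  }

  @Override
  public void close() throws IOException {
    boolean success = false;
    try {
      flush(); // a failure here (as in testFailToFlush) aborts the rename
      success = true;
    } finally {
      out.close();
      if (success) {
        // Only now does the destination change: delete + rename is the
        // closest portable approximation of an atomic replace.
        if (origFile.exists() && !origFile.delete()) {
          throw new IOException("Could not delete " + origFile);
        }
        if (!tmpFile.renameTo(origFile)) {
          throw new IOException("Could not rename " + tmpFile
              + " to " + origFile);
        }
      } else {
        // Failed close: leave any existing destination untouched and
        // clean up the temp file, as testFailToFlush asserts.
        tmpFile.delete();
      }
    }
  }
}

Delete-then-rename is not truly atomic on every platform; the committed class may differ in detail, but the tests only require that a successful close publishes the new contents and a failed close changes nothing visible.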
Added: hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/util/TestMD5FileUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/util/TestMD5FileUtils.java?rev=1152295&view=auto
==============================================================================
--- hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/util/TestMD5FileUtils.java (added)
+++ hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/util/TestMD5FileUtils.java Fri Jul 29 16:28:45 2011
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.util;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.util.MD5FileUtils;
+import org.apache.hadoop.io.MD5Hash;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class TestMD5FileUtils {
+  private static final File TEST_DIR_ROOT = new File(
+      System.getProperty("test.build.data","build/test/data"));
+  private static final File TEST_DIR = new File(TEST_DIR_ROOT,
+      "TestMD5FileUtils");
+  private static final File TEST_FILE = new File(TEST_DIR,
+      "testMd5File.dat");
+
+  private static final int TEST_DATA_LEN = 128 * 1024; // 128KB test data
+  private static final byte[] TEST_DATA =
+      DFSTestUtil.generateSequentialBytes(0, TEST_DATA_LEN);
+  private static final MD5Hash TEST_MD5 = MD5Hash.digest(TEST_DATA);
+
+  @Before
+  public void setup() throws IOException {
+    FileUtil.fullyDelete(TEST_DIR);
+    assertTrue(TEST_DIR.mkdirs());
+
+    // Write a file out
+    FileOutputStream fos = new FileOutputStream(TEST_FILE);
+    fos.write(TEST_DATA);
+    fos.close();
+  }
+
+  @Test
+  public void testComputeMd5ForFile() throws Exception {
+    MD5Hash computedDigest = MD5FileUtils.computeMd5ForFile(TEST_FILE);
+    assertEquals(TEST_MD5, computedDigest);
+  }
+
+  @Test
+  public void testVerifyMD5FileGood() throws Exception {
+    MD5FileUtils.saveMD5File(TEST_FILE, TEST_MD5);
+    MD5FileUtils.verifySavedMD5(TEST_FILE, TEST_MD5);
+  }
+
+  /**
+   * Test when .md5 file does not exist at all
+   */
+  @Test(expected=IOException.class)
+  public void testVerifyMD5FileMissing() throws Exception {
+    MD5FileUtils.verifySavedMD5(TEST_FILE, TEST_MD5);
+  }
+
+  /**
+   * Test when .md5 file exists but has an incorrect checksum
+   */
+  @Test
+  public void testVerifyMD5FileBadDigest() throws Exception {
+    MD5FileUtils.saveMD5File(TEST_FILE, MD5Hash.digest(new byte[0]));
+    try {
+      MD5FileUtils.verifySavedMD5(TEST_FILE, TEST_MD5);
+      fail("Did not throw");
+    } catch (IOException ioe) {
+      // Expected
+    }
+  }
+
+  /**
+   * Test when .md5 file exists but has a bad format
+   */
+  @Test
+  public void testVerifyMD5FileBadFormat() throws Exception {
+    FileWriter writer = new FileWriter(MD5FileUtils.getDigestFileForFile(TEST_FILE));
+    try {
+      writer.write("this is not an md5 file");
+    } finally {
+      writer.close();
+    }
+
+    try {
+      MD5FileUtils.verifySavedMD5(TEST_FILE, TEST_MD5);
fail("Did not throw"); + } catch (IOException ioe) { + // expected + } + } +} Modified: hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/test/GenericTestUtils.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/test/GenericTestUtils.java?rev=1152295&r1=1152294&r2=1152295&view=diff ============================================================================== --- hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/test/GenericTestUtils.java (original) +++ hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/test/GenericTestUtils.java Fri Jul 29 16:28:45 2011 @@ -17,16 +17,29 @@ */ package org.apache.hadoop.test; +import java.io.File; import java.io.IOException; +import java.util.Arrays; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeoutException; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption; -import org.apache.hadoop.hdfs.server.namenode.NameNode; +import org.apache.commons.logging.Log; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.util.StringUtils; +import org.junit.Assert; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import com.google.common.base.Joiner; +import com.google.common.base.Supplier; +import com.google.common.collect.Sets; /** * Test provides some very generic helpers which might be used across the tests */ public abstract class GenericTestUtils { + /** * Extracts the name of the method where the invocation has happened * @return String name of the invoking method @@ -34,4 +47,133 @@ public abstract class GenericTestUtils { public static String getMethodName() { return Thread.currentThread().getStackTrace()[2].getMethodName(); } + + /** + * Assert that a given file exists. + */ + public static void assertExists(File f) { + Assert.assertTrue("File " + f + " should exist", f.exists()); + } + + /** + * List all of the files in 'dir' that match the regex 'pattern'. + * Then check that this list is identical to 'expectedMatches'. + * @throws IOException if the dir is inaccessible + */ + public static void assertGlobEquals(File dir, String pattern, + String ... expectedMatches) throws IOException { + + Set found = Sets.newTreeSet(); + for (File f : FileUtil.listFiles(dir)) { + if (f.getName().matches(pattern)) { + found.add(f.getName()); + } + } + Set expectedSet = Sets.newTreeSet( + Arrays.asList(expectedMatches)); + Assert.assertEquals("Bad files matching " + pattern + " in " + dir, + Joiner.on(",").join(found), + Joiner.on(",").join(expectedSet)); + } + + public static void assertExceptionContains(String string, Throwable t) { + String msg = t.getMessage(); + Assert.assertTrue( + "Unexpected exception:" + StringUtils.stringifyException(t), + msg.contains(string)); + } + + public static void waitFor(Supplier check, + int checkEveryMillis, int waitForMillis) + throws TimeoutException, InterruptedException + { + long st = System.currentTimeMillis(); + do { + boolean result = check.get(); + if (result) { + return; + } + + Thread.sleep(checkEveryMillis); + } while (System.currentTimeMillis() - st < waitForMillis); + throw new TimeoutException("Timed out waiting for condition"); + } + + + /** + * Mockito answer helper that triggers one latch as soon as the + * method is called, then waits on another before continuing. 
+
+  /**
+   * Mockito answer helper that triggers one latch as soon as the
+   * method is called, then waits on another before continuing.
+   */
+  public static class DelayAnswer implements Answer<Object> {
+    private final Log LOG;
+
+    private final CountDownLatch fireLatch = new CountDownLatch(1);
+    private final CountDownLatch waitLatch = new CountDownLatch(1);
+
+    public DelayAnswer(Log log) {
+      this.LOG = log;
+    }
+
+    /**
+     * Wait until the method is called.
+     */
+    public void waitForCall() throws InterruptedException {
+      fireLatch.await();
+    }
+
+    /**
+     * Tell the method to proceed.
+     * This should only be called after waitForCall()
+     */
+    public void proceed() {
+      waitLatch.countDown();
+    }
+
+    public Object answer(InvocationOnMock invocation) throws Throwable {
+      LOG.info("DelayAnswer firing fireLatch");
+      fireLatch.countDown();
+      try {
+        LOG.info("DelayAnswer waiting on waitLatch");
+        waitLatch.await();
+        LOG.info("DelayAnswer delay complete");
+      } catch (InterruptedException ie) {
+        throw new IOException("Interrupted waiting on latch", ie);
+      }
+      return passThrough(invocation);
+    }
+
+    protected Object passThrough(InvocationOnMock invocation) throws Throwable {
+      return invocation.callRealMethod();
+    }
+  }
+
+  /**
+   * An Answer implementation that simply forwards all calls through
+   * to a delegate.
+   *
+   * This is useful as the default Answer for a mock object, to create
+   * something like a spy on an RPC proxy. For example:
+   *
+   *    NamenodeProtocol origNNProxy = secondary.getNameNode();
+   *    NamenodeProtocol spyNNProxy = Mockito.mock(NamenodeProtocol.class,
+   *        new DelegateAnswer(origNNProxy));
+   *    doThrow(...).when(spyNNProxy).getBlockLocations(...);
+   *    ...
+   */
+  public static class DelegateAnswer implements Answer<Object> {
+    private final Object delegate;
+
+    public DelegateAnswer(Object delegate) {
+      this.delegate = delegate;
+    }
+
+    @Override
+    public Object answer(InvocationOnMock invocation) throws Throwable {
+      return invocation.getMethod().invoke(
+          delegate, invocation.getArguments());
+    }
+  }
+}
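DelegateAnswer's javadoc already shows its intended wiring. DelayAnswer is typically wired up as below — a self-contained sketch in which the Worker class and its doWork() method are invented for the illustration.

// Hypothetical use of DelayAnswer: stall a spied call mid-flight so a test
// can assert on intermediate state, then release it.
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.test.GenericTestUtils.DelayAnswer;
import org.mockito.Mockito;

public class DelayAnswerExample {
  /** Hypothetical collaborator, invented for this illustration. */
  public static class Worker {
    public void doWork() { /* real work elided */ }
  }

  private static final Log LOG = LogFactory.getLog(DelayAnswerExample.class);

  public static void main(String[] args) throws Exception {
    final Worker spy = Mockito.spy(new Worker());
    final DelayAnswer delayer = new DelayAnswer(LOG);
    Mockito.doAnswer(delayer).when(spy).doWork();

    Thread t = new Thread() {
      @Override
      public void run() {
        spy.doWork(); // blocks inside DelayAnswer until proceed()
      }
    };
    t.start();

    delayer.waitForCall(); // returns once doWork() has been entered
    // a test would assert on in-flight state here
    delayer.proceed();     // release the blocked call
    t.join();
  }
}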