From: vozerov@apache.org
To: commits@ignite.apache.org
Date: Mon, 16 Jan 2017 13:36:33 -0000
Subject: [1/2] ignite git commit: IGNITE-4428: Hadoop: moved HadoopMapReducePlanner and dependent classes to public space. This closes #1389. This closes #1394.

Repository: ignite
Updated Branches:
  refs/heads/ignite-2.0 77ca2e636 -> d14e0727b


http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/planner/HadoopDefaultMapReducePlan.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/planner/HadoopDefaultMapReducePlan.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/planner/HadoopDefaultMapReducePlan.java
index 7aaf3fa..1004e3c 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/planner/HadoopDefaultMapReducePlan.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/planner/HadoopDefaultMapReducePlan.java
@@ -17,8 +17,8 @@ package org.apache.ignite.internal.processors.hadoop.planner;
-import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit;
-import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan;
+import org.apache.ignite.hadoop.HadoopInputSplit;
+import org.apache.ignite.hadoop.HadoopMapReducePlan;
 import org.jetbrains.annotations.Nullable;
 import java.util.Collection;

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/shuffle/HadoopShuffle.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/shuffle/HadoopShuffle.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/shuffle/HadoopShuffle.java
index 8ffea8c..6f9b7a4 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/shuffle/HadoopShuffle.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/shuffle/HadoopShuffle.java
@@ -25,9 +25,9 @@ import org.apache.ignite.internal.managers.communication.GridIoPolicy;
 import org.apache.ignite.internal.managers.communication.GridMessageListener;
 import org.apache.ignite.internal.processors.hadoop.HadoopComponent;
 import org.apache.ignite.internal.processors.hadoop.HadoopContext;
-import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit;
+import org.apache.ignite.hadoop.HadoopInputSplit;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
-import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan;
+import org.apache.ignite.hadoop.HadoopMapReducePlan;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskInput;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskOutput;

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/shuffle/HadoopShuffleJob.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/shuffle/HadoopShuffleJob.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/shuffle/HadoopShuffleJob.java
index 318ead3..25925fc 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/shuffle/HadoopShuffleJob.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/shuffle/HadoopShuffleJob.java
@@ -21,7 +21,7 @@ import org.apache.ignite.IgniteCheckedException;
 import org.apache.ignite.IgniteLogger;
 import org.apache.ignite.internal.IgniteInternalFuture;
 import org.apache.ignite.internal.IgniteInterruptedCheckedException;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
 import org.apache.ignite.internal.processors.hadoop.HadoopMapperAwareTaskOutput;
 import org.apache.ignite.internal.processors.hadoop.HadoopMapperUtils;
@@ -86,7 +86,7 @@ public class HadoopShuffleJob implements AutoCloseable {
 private static final boolean DFLT_SHUFFLE_MSG_GZIP = false;
 /** */
- private final HadoopJob job;
+ private final HadoopJobEx job;
 /** */
 private final GridUnsafeMemory mem;
@@ -169,7 +169,7 @@ public class HadoopShuffleJob implements AutoCloseable {
 * @param embedded Whether shuffle is running in embedded mode.
 * @throws IgniteCheckedException If error.
 */
- public HadoopShuffleJob(T locReduceAddr, IgniteLogger log, HadoopJob job, GridUnsafeMemory mem,
+ public HadoopShuffleJob(T locReduceAddr, IgniteLogger log, HadoopJobEx job, GridUnsafeMemory mem,
 int totalReducerCnt, int[] locReducers, int locMappersCnt, boolean embedded) throws IgniteCheckedException {
 this.locReduceAddr = locReduceAddr;
 this.totalReducerCnt = totalReducerCnt;
@@ -412,6 +412,7 @@ public class HadoopShuffleJob implements AutoCloseable {
 *
 * @param msg Message.
 * @return Buffer.
+ * @throws IgniteCheckedException On error.
 */
 private byte[] extractBuffer(HadoopDirectShuffleMessage msg) throws IgniteCheckedException {
 if (msgGzip) {

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopEmbeddedTaskExecutor.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopEmbeddedTaskExecutor.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopEmbeddedTaskExecutor.java
index 5ede18e..96893b1 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopEmbeddedTaskExecutor.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopEmbeddedTaskExecutor.java
@@ -21,7 +21,7 @@ import java.util.Collection;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import org.apache.ignite.IgniteCheckedException;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobPhase;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext;
@@ -76,7 +76,7 @@ public class HadoopEmbeddedTaskExecutor extends HadoopTaskExecutorAdapter {
 }
 /** {@inheritDoc} */
- @Override public void run(final HadoopJob job, Collection tasks) throws IgniteCheckedException {
+ @Override public void run(final HadoopJobEx job, Collection tasks) throws IgniteCheckedException {
 if (log.isDebugEnabled())
 log.debug("Submitting tasks for local execution [locNodeId=" + ctx.localNodeId() + ", tasksCnt=" + tasks.size() + ']');
@@ -128,8 +128,8 @@ public class HadoopEmbeddedTaskExecutor extends HadoopTaskExecutorAdapter {
 * for this job ID.
 *
 * It is guaranteed that this method will not be called concurrently with
- * {@link #run(org.apache.ignite.internal.processors.hadoop.HadoopJob, Collection)} method. No more job submissions will be performed via
- * {@link #run(org.apache.ignite.internal.processors.hadoop.HadoopJob, Collection)} method for given job ID after this method is called.
+ * {@link #run(HadoopJobEx, Collection)} method. No more job submissions will be performed via
+ * {@link #run(HadoopJobEx, Collection)} method for given job ID after this method is called.
 *
 * @param jobId Job ID to cancel.
 */

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopRunnableTask.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopRunnableTask.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopRunnableTask.java
index a57efe6..afa01a4 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopRunnableTask.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopRunnableTask.java
@@ -21,7 +21,7 @@ import java.util.UUID;
 import java.util.concurrent.Callable;
 import org.apache.ignite.IgniteCheckedException;
 import org.apache.ignite.IgniteLogger;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskCancelledException;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo;
@@ -51,7 +51,7 @@ public abstract class HadoopRunnableTask implements Callable {
 private final IgniteLogger log;
 /** */
- private final HadoopJob job;
+ private final HadoopJobEx job;
 /** Task to run. */
 private final HadoopTaskInfo info;
@@ -84,7 +84,7 @@ public abstract class HadoopRunnableTask implements Callable {
 * @param info Task info.
 * @param nodeId Node id.
 */
- protected HadoopRunnableTask(IgniteLogger log, HadoopJob job, GridUnsafeMemory mem, HadoopTaskInfo info,
+ protected HadoopRunnableTask(IgniteLogger log, HadoopJobEx job, GridUnsafeMemory mem, HadoopTaskInfo info,
 UUID nodeId) {
 this.nodeId = nodeId;
 this.log = log.getLogger(HadoopRunnableTask.class);

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopTaskExecutorAdapter.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopTaskExecutorAdapter.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopTaskExecutorAdapter.java
index f13c76a..b7d0a34 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopTaskExecutorAdapter.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopTaskExecutorAdapter.java
@@ -20,7 +20,7 @@ package org.apache.ignite.internal.processors.hadoop.taskexecutor;
 import java.util.Collection;
 import org.apache.ignite.IgniteCheckedException;
 import org.apache.ignite.internal.processors.hadoop.HadoopComponent;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo;
 import org.apache.ignite.internal.processors.hadoop.jobtracker.HadoopJobMetadata;
@@ -36,15 +36,15 @@ public abstract class HadoopTaskExecutorAdapter extends HadoopComponent {
 * @param tasks Tasks.
 * @throws IgniteCheckedException If failed.
 */
- public abstract void run(final HadoopJob job, Collection tasks) throws IgniteCheckedException;
+ public abstract void run(final HadoopJobEx job, Collection tasks) throws IgniteCheckedException;
 /**
 * Cancels all currently running tasks for given job ID and cancels scheduled execution of tasks
 * for this job ID.
 *
 * It is guaranteed that this method will not be called concurrently with
- * {@link #run(org.apache.ignite.internal.processors.hadoop.HadoopJob, Collection)} method. No more job submissions will be performed via
- * {@link #run(org.apache.ignite.internal.processors.hadoop.HadoopJob, Collection)} method for given job ID after this method is called.
+ * {@link #run(HadoopJobEx, Collection)} method. No more job submissions will be performed via
+ * {@link #run(HadoopJobEx, Collection)} method for given job ID after this method is called.
 *
 * @param jobId Job ID to cancel.
 */

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/HadoopExternalTaskExecutor.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/HadoopExternalTaskExecutor.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/HadoopExternalTaskExecutor.java
index 2c560bc..afa3e25 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/HadoopExternalTaskExecutor.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/HadoopExternalTaskExecutor.java
@@ -33,10 +33,10 @@ import org.apache.ignite.IgniteCheckedException;
 import org.apache.ignite.IgniteLogger;
 import org.apache.ignite.internal.IgniteInternalFuture;
 import org.apache.ignite.internal.processors.hadoop.HadoopContext;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobPhase;
-import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan;
+import org.apache.ignite.hadoop.HadoopMapReducePlan;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskType;
 import org.apache.ignite.internal.processors.hadoop.jobtracker.HadoopJobMetadata;
@@ -198,7 +198,7 @@ public class HadoopExternalTaskExecutor extends HadoopTaskExecutorAdapter {
 }
 }
 else if (ctx.isParticipating(meta)) {
- HadoopJob job;
+ HadoopJobEx job;
 try {
 job = jobTracker.job(meta.jobId(), meta.jobInfo());
@@ -215,7 +215,7 @@
 /** {@inheritDoc} */
 @SuppressWarnings("ConstantConditions")
- @Override public void run(final HadoopJob job, final Collection tasks) throws IgniteCheckedException {
+ @Override public void run(final HadoopJobEx job, final Collection tasks) throws IgniteCheckedException {
 if (!busyLock.tryReadLock()) {
 if (log.isDebugEnabled())
 log.debug("Failed to start hadoop tasks (grid is stopping, will ignore).");
@@ -293,7 +293,7 @@
 * @param job Job instance.
 * @param tasks Collection of tasks to execute in started process.
 */
- private void sendExecutionRequest(HadoopProcess proc, HadoopJob job, Collection tasks)
+ private void sendExecutionRequest(HadoopProcess proc, HadoopJobEx job, Collection tasks)
 throws IgniteCheckedException {
 // Must synchronize since concurrent process crash may happen and will receive onConnectionLost().
 proc.lock();
@@ -349,7 +349,7 @@ public class HadoopExternalTaskExecutor extends HadoopTaskExecutorAdapter {
 * @param job Job instance.
 * @param plan Map reduce plan.
 */
- private HadoopProcess startProcess(final HadoopJob job, final HadoopMapReducePlan plan) {
+ private HadoopProcess startProcess(final HadoopJobEx job, final HadoopMapReducePlan plan) {
 final UUID childProcId = UUID.randomUUID();
 HadoopJobId jobId = job.id();
@@ -523,7 +523,7 @@ public class HadoopExternalTaskExecutor extends HadoopTaskExecutorAdapter {
 * @return Started process.
 */
 private Process startJavaProcess(UUID childProcId, HadoopExternalTaskMetadata startMeta,
- HadoopJob job, String igniteWorkDir) throws Exception {
+ HadoopJobEx job, String igniteWorkDir) throws Exception {
 String outFldr = jobWorkFolder(job.id()) + File.separator + childProcId;
 if (log.isDebugEnabled())
@@ -633,7 +633,7 @@ public class HadoopExternalTaskExecutor extends HadoopTaskExecutorAdapter {
 * @param job Job.
 * @param plan Map reduce plan.
 */
- private void prepareForJob(HadoopProcess proc, HadoopJob job, HadoopMapReducePlan plan) {
+ private void prepareForJob(HadoopProcess proc, HadoopJobEx job, HadoopMapReducePlan plan) {
 try {
 comm.sendMessage(proc.descriptor(), new HadoopPrepareForJobRequest(job.id(), job.info(), plan.reducers(), plan.reducers(ctx.localNodeId())));

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java
index 3336120..8d23ffe 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java
@@ -26,7 +26,7 @@ import org.apache.ignite.IgniteLogger;
 import org.apache.ignite.internal.IgniteInternalFuture;
 import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils;
 import org.apache.ignite.internal.processors.hadoop.HadoopHelperImpl;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskInput;
@@ -90,7 +90,7 @@ public class HadoopChildProcessRunner {
 private final GridFutureAdapter initFut = new GridFutureAdapter<>();
 /** Job instance. */
- private HadoopJob job;
+ private HadoopJobEx job;
 /** Number of uncompleted tasks.
 */
 private final AtomicInteger pendingTasks = new AtomicInteger();

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/main/resources/META-INF/classnames.properties
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/resources/META-INF/classnames.properties b/modules/hadoop/src/main/resources/META-INF/classnames.properties
index 0ac17cf..f528107 100644
--- a/modules/hadoop/src/main/resources/META-INF/classnames.properties
+++ b/modules/hadoop/src/main/resources/META-INF/classnames.properties
@@ -29,13 +29,13 @@ org.apache.ignite.internal.processors.hadoop.HadoopAttributes
 org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo
 org.apache.ignite.internal.processors.hadoop.HadoopExternalSplit
 org.apache.ignite.internal.processors.hadoop.HadoopFileBlock
-org.apache.ignite.internal.processors.hadoop.HadoopInputSplit
+org.apache.ignite.hadoop.HadoopInputSplit
 org.apache.ignite.internal.processors.hadoop.HadoopJobId
 org.apache.ignite.internal.processors.hadoop.HadoopJobInfo
 org.apache.ignite.internal.processors.hadoop.HadoopJobPhase
 org.apache.ignite.internal.processors.hadoop.HadoopJobProperty
 org.apache.ignite.internal.processors.hadoop.HadoopJobStatus
-org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan
+org.apache.ignite.hadoop.HadoopMapReducePlan
 org.apache.ignite.internal.processors.hadoop.HadoopSplitWrapper
 org.apache.ignite.internal.processors.hadoop.HadoopTaskCancelledException
 org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopCommandLineTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopCommandLineTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopCommandLineTest.java
index 0be8bf9..666acdf 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopCommandLineTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopCommandLineTest.java
@@ -38,7 +38,7 @@ import org.apache.ignite.igfs.IgfsInputStream;
 import org.apache.ignite.igfs.IgfsPath;
 import org.apache.ignite.internal.IgnitionEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.jobtracker.HadoopJobTracker;
 import org.apache.ignite.internal.processors.igfs.IgfsEx;
 import org.apache.ignite.internal.processors.resource.GridSpringResourceContext;
@@ -245,7 +245,7 @@ public class HadoopCommandLineTest extends GridCommonAbstractTest {
 private ProcessBuilder createProcessBuilder() {
 String sep = ":";
- String ggClsPath = HadoopJob.class.getProtectionDomain().getCodeSource().getLocation().getPath() + sep +
+ String ggClsPath = HadoopJobEx.class.getProtectionDomain().getCodeSource().getLocation().getPath() + sep +
 HadoopJobTracker.class.getProtectionDomain().getCodeSource().getLocation().getPath() + sep +
 ConcurrentHashMap8.class.getProtectionDomain().getCodeSource().getLocation().getPath();
http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopJobTrackerSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopJobTrackerSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopJobTrackerSelfTest.java
index 91ad5ec..692c2a3 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopJobTrackerSelfTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopJobTrackerSelfTest.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.ignite.configuration.HadoopConfiguration;
+import org.apache.ignite.hadoop.planner.HadoopTestRoundRobinMrPlanner;
 import org.apache.ignite.internal.IgniteInternalFuture;
 import org.apache.ignite.internal.IgniteKernal;
 import org.apache.ignite.internal.processors.hadoop.Hadoop;

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopPlannerMockJob.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopPlannerMockJob.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopPlannerMockJob.java
index 7e6fa9c..28c8264 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopPlannerMockJob.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopPlannerMockJob.java
@@ -20,8 +20,8 @@ package org.apache.ignite.internal.processors.hadoop.impl;
 import org.apache.ignite.IgniteCheckedException;
 import org.apache.ignite.IgniteLogger;
 import org.apache.ignite.internal.processors.hadoop.HadoopHelper;
-import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.hadoop.HadoopInputSplit;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext;
@@ -34,7 +34,7 @@ import java.util.UUID;
 /**
 * Mock job for planner tests.
 */
-public class HadoopPlannerMockJob implements HadoopJob {
+public class HadoopPlannerMockJob extends HadoopJobEx {
 /** Input splits.
 */
 private final Collection splits;
@@ -53,7 +53,7 @@ public class HadoopPlannerMockJob implements HadoopJob {
 }
 /** {@inheritDoc} */
- @Override public Collection input() throws IgniteCheckedException {
+ @Override public Collection input() {
 return splits;
 }
@@ -158,7 +158,7 @@ public class HadoopPlannerMockJob implements HadoopJob {
 }
 /** {@inheritDoc} */
- @Override public HadoopJob createJob(Class jobCls, HadoopJobId jobId, IgniteLogger log,
+ @Override public HadoopJobEx createJob(Class jobCls, HadoopJobId jobId, IgniteLogger log,
 @Nullable String[] libNames, HadoopHelper helper) throws IgniteCheckedException {
 throwUnsupported();

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksAllVersionsTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksAllVersionsTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksAllVersionsTest.java
index 8b1b693..5d34989 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksAllVersionsTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksAllVersionsTest.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.ignite.IgniteCheckedException;
 import org.apache.ignite.igfs.IgfsPath;
 import org.apache.ignite.internal.processors.hadoop.HadoopFileBlock;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskType;
 import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount2;
@@ -48,7 +48,7 @@ abstract class HadoopTasksAllVersionsTest extends HadoopAbstractWordCountTest {
 * @return Hadoop job.
 * @throws IOException If fails.
 */
- public abstract HadoopJob getHadoopJob(String inFile, String outFile) throws Exception;
+ public abstract HadoopJobEx getHadoopJob(String inFile, String outFile) throws Exception;
 /**
 * @return prefix of reducer output file name. It's "part-" for v1 and "part-r-" for v2 API
@@ -84,7 +84,7 @@ abstract class HadoopTasksAllVersionsTest extends HadoopAbstractWordCountTest {
 HadoopFileBlock fileBlock2 = new HadoopFileBlock(HOSTS, inFileUri, fileBlock1.length(), igfs.info(inFile).length() - fileBlock1.length());
- HadoopJob gridJob = getHadoopJob(igfsScheme() + inFile.toString(), igfsScheme() + PATH_OUTPUT);
+ HadoopJobEx gridJob = getHadoopJob(igfsScheme() + inFile.toString(), igfsScheme() + PATH_OUTPUT);
 HadoopTaskInfo taskInfo = new HadoopTaskInfo(HadoopTaskType.MAP, gridJob.id(), 0, 0, fileBlock1);
@@ -115,7 +115,7 @@ abstract class HadoopTasksAllVersionsTest extends HadoopAbstractWordCountTest {
 * @return Context with mock output.
 * @throws IgniteCheckedException If fails.
 */
- private HadoopTestTaskContext runTaskWithInput(HadoopJob gridJob, HadoopTaskType taskType,
+ private HadoopTestTaskContext runTaskWithInput(HadoopJobEx gridJob, HadoopTaskType taskType,
 int taskNum, String... words) throws IgniteCheckedException {
 HadoopTaskInfo taskInfo = new HadoopTaskInfo(taskType, gridJob.id(), taskNum, 0, null);
@@ -141,7 +141,7 @@ abstract class HadoopTasksAllVersionsTest extends HadoopAbstractWordCountTest {
 * @throws Exception If fails.
 */
 public void testReduceTask() throws Exception {
- HadoopJob gridJob = getHadoopJob(igfsScheme() + PATH_INPUT, igfsScheme() + PATH_OUTPUT);
+ HadoopJobEx gridJob = getHadoopJob(igfsScheme() + PATH_INPUT, igfsScheme() + PATH_OUTPUT);
 runTaskWithInput(gridJob, HadoopTaskType.REDUCE, 0, "word1", "5", "word2", "10");
 runTaskWithInput(gridJob, HadoopTaskType.REDUCE, 1, "word3", "7", "word4", "15");
@@ -167,7 +167,7 @@ abstract class HadoopTasksAllVersionsTest extends HadoopAbstractWordCountTest {
 * @throws Exception If fails.
 */
 public void testCombinerTask() throws Exception {
- HadoopJob gridJob = getHadoopJob("/", "/");
+ HadoopJobEx gridJob = getHadoopJob("/", "/");
 HadoopTestTaskContext ctx = runTaskWithInput(gridJob, HadoopTaskType.COMBINE, 0, "word1", "5", "word2", "10");
@@ -187,7 +187,7 @@ abstract class HadoopTasksAllVersionsTest extends HadoopAbstractWordCountTest {
 * @return Context of combine task with mock output.
 * @throws IgniteCheckedException If fails.
 */
- private HadoopTestTaskContext runMapCombineTask(HadoopFileBlock fileBlock, HadoopJob gridJob)
+ private HadoopTestTaskContext runMapCombineTask(HadoopFileBlock fileBlock, HadoopJobEx gridJob)
 throws IgniteCheckedException {
 HadoopTaskInfo taskInfo = new HadoopTaskInfo(HadoopTaskType.MAP, gridJob.id(), 0, 0, fileBlock);
@@ -233,7 +233,7 @@ abstract class HadoopTasksAllVersionsTest extends HadoopAbstractWordCountTest {
 HadoopFileBlock fileBlock1 = new HadoopFileBlock(HOSTS, inFileUri, 0, l);
 HadoopFileBlock fileBlock2 = new HadoopFileBlock(HOSTS, inFileUri, l, fileLen - l);
- HadoopJob gridJob = getHadoopJob(inFileUri.toString(), igfsScheme() + PATH_OUTPUT);
+ HadoopJobEx gridJob = getHadoopJob(inFileUri.toString(), igfsScheme() + PATH_OUTPUT);
 HadoopTestTaskContext combine1Ctx = runMapCombineTask(fileBlock1, gridJob);

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV1Test.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV1Test.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV1Test.java
index d7cd738..1d7f3e4 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV1Test.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV1Test.java
@@ -21,7 +21,7 @@ import java.io.IOException;
 import java.util.UUID;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
 import org.apache.ignite.internal.processors.hadoop.HadoopHelperImpl;
 import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount1;
@@ -41,7 +41,7 @@ public class HadoopTasksV1Test extends HadoopTasksAllVersionsTest {
 * @return Hadoop job.
 * @throws IOException If fails.
 */
- @Override public HadoopJob getHadoopJob(String inFile, String outFile) throws Exception {
+ @Override public HadoopJobEx getHadoopJob(String inFile, String outFile) throws Exception {
 JobConf jobConf = HadoopWordCount1.getJob(inFile, outFile);
 setupFileSystems(jobConf);

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV2Test.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV2Test.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV2Test.java
index c635c41..61e3e46 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV2Test.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV2Test.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
 import org.apache.ignite.internal.processors.hadoop.HadoopHelperImpl;
 import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount2;
@@ -46,7 +46,7 @@ public class HadoopTasksV2Test extends HadoopTasksAllVersionsTest {
 * @return Hadoop job.
 * @throws Exception if fails.
 */
- @Override public HadoopJob getHadoopJob(String inFile, String outFile) throws Exception {
+ @Override public HadoopJobEx getHadoopJob(String inFile, String outFile) throws Exception {
 Job job = Job.getInstance();
 job.setOutputKeyClass(Text.class);

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestRoundRobinMrPlanner.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestRoundRobinMrPlanner.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestRoundRobinMrPlanner.java
deleted file mode 100644
index 81f6f3c..0000000
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestRoundRobinMrPlanner.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.processors.hadoop.impl;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.UUID;
-import org.apache.ignite.IgniteCheckedException;
-import org.apache.ignite.cluster.ClusterNode;
-import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
-import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan;
-import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlanner;
-import org.apache.ignite.internal.processors.hadoop.planner.HadoopDefaultMapReducePlan;
-import org.jetbrains.annotations.Nullable;
-
-/**
- * Round-robin mr planner.
- */
-public class HadoopTestRoundRobinMrPlanner implements HadoopMapReducePlanner {
- /** {@inheritDoc} */
- @Override public HadoopMapReducePlan preparePlan(HadoopJob job, Collection top,
- @Nullable HadoopMapReducePlan oldPlan) throws IgniteCheckedException {
- if (top.isEmpty())
- throw new IllegalArgumentException("Topology is empty");
-
- // Has at least one element.
- Iterator it = top.iterator();
-
- Map> mappers = new HashMap<>();
-
- for (HadoopInputSplit block : job.input()) {
- ClusterNode node = it.next();
-
- Collection nodeBlocks = mappers.get(node.id());
-
- if (nodeBlocks == null) {
- nodeBlocks = new ArrayList<>();
-
- mappers.put(node.id(), nodeBlocks);
- }
-
- nodeBlocks.add(block);
-
- if (!it.hasNext())
- it = top.iterator();
- }
-
- int[] rdc = new int[job.info().reducers()];
-
- for (int i = 0; i < rdc.length; i++)
- rdc[i] = i;
-
- return new HadoopDefaultMapReducePlan(mappers, Collections.singletonMap(it.next().id(), rdc));
- }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestTaskContext.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestTaskContext.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestTaskContext.java
index cfd41cf..fd213a1 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestTaskContext.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestTaskContext.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.ignite.IgniteCheckedException;
 import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskInput;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskOutput;
@@ -192,7 +192,7 @@ class HadoopTestTaskContext extends HadoopV2TaskContext {
 * @param taskInfo Task info.
 * @param gridJob Grid Hadoop job.
 */
- public HadoopTestTaskContext(HadoopTaskInfo taskInfo, HadoopJob gridJob) throws IgniteCheckedException {
+ public HadoopTestTaskContext(HadoopTaskInfo taskInfo, HadoopJobEx gridJob) throws IgniteCheckedException {
 super(taskInfo, gridJob, gridJob.id(), null, jobConfDataInput(gridJob));
 }
@@ -203,7 +203,7 @@ class HadoopTestTaskContext extends HadoopV2TaskContext {
 * @return DataInput with JobConf.
 * @throws IgniteCheckedException If failed.
 */
- private static DataInput jobConfDataInput(HadoopJob job) throws IgniteCheckedException {
+ private static DataInput jobConfDataInput(HadoopJobEx job) throws IgniteCheckedException {
 JobConf jobConf = new JobConf();
 for (Map.Entry e : ((HadoopDefaultJobInfo)job.info()).properties().entrySet())

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopV2JobSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopV2JobSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopV2JobSelfTest.java
index 540a7aa..2c2f049 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopV2JobSelfTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopV2JobSelfTest.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.io.serializer.WritableSerialization;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.ignite.IgniteCheckedException;
 import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
 import org.apache.ignite.internal.processors.hadoop.HadoopSerialization;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext;
@@ -69,7 +69,7 @@ public class HadoopV2JobSelfTest extends HadoopAbstractSelfTest {
 }
 /**
- * Tests that {@link HadoopJob} provides wrapped serializer if it's set in configuration.
+ * Tests that {@link HadoopJobEx} provides wrapped serializer if it's set in configuration.
 *
 * @throws IgniteCheckedException If fails.
 */
@@ -86,7 +86,7 @@
 HadoopJobId id = new HadoopJobId(uuid, 1);
- HadoopJob job = info.createJob(HadoopV2Job.class, id, log, null, new HadoopHelperImpl());
+ HadoopJobEx job = info.createJob(HadoopV2Job.class, id, log, null, new HadoopHelperImpl());
 HadoopTaskContext taskCtx = job.getTaskContext(new HadoopTaskInfo(HadoopTaskType.MAP, null, 0, 0, null));

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopWeightedMapReducePlannerTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopWeightedMapReducePlannerTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopWeightedMapReducePlannerTest.java
index 430c675..6dcd998 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopWeightedMapReducePlannerTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopWeightedMapReducePlannerTest.java
@@ -24,9 +24,9 @@ import org.apache.ignite.igfs.IgfsBlockLocation;
 import org.apache.ignite.igfs.IgfsPath;
 import org.apache.ignite.internal.IgniteNodeAttributes;
 import org.apache.ignite.internal.processors.hadoop.HadoopFileBlock;
-import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit;
-import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan;
-import org.apache.ignite.internal.processors.hadoop.planner.HadoopAbstractMapReducePlanner;
+import org.apache.ignite.hadoop.HadoopInputSplit;
+import org.apache.ignite.hadoop.HadoopMapReducePlan;
+import org.apache.ignite.hadoop.planner.HadoopAbstractMapReducePlanner;
 import org.apache.ignite.internal.processors.igfs.IgfsIgniteMock;
 import org.apache.ignite.internal.processors.igfs.IgfsMock;
 import org.apache.ignite.internal.util.typedef.F;

http://git-wip-us.apache.org/repos/asf/ignite/blob/d14e0727/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopAbstractMapTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopAbstractMapTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopAbstractMapTest.java
index 1f8978d..b9dcae1 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopAbstractMapTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopAbstractMapTest.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.ignite.IgniteCheckedException;
 import org.apache.ignite.IgniteLogger;
 import org.apache.ignite.internal.processors.hadoop.HadoopHelper;
-import org.apache.ignite.internal.processors.hadoop.HadoopJob;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
 import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo;
 import org.apache.ignite.internal.processors.hadoop.HadoopPartitioner;
@@ -150,7 +150,7 @@ public abstract class HadoopAbstractMapTest extends GridCommonAbstractTest {
 }
 /** {@inheritDoc} */
- @Override public HadoopJob createJob(Class jobCls, HadoopJobId jobId, IgniteLogger log,
+ @Override public HadoopJobEx createJob(Class jobCls, HadoopJobId jobId, IgniteLogger log,
 @Nullable String[] libNames, HadoopHelper helper) throws IgniteCheckedException {
 assert false;
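
For downstream code, the changes in this part of the commit amount to an import change plus a rename: HadoopInputSplit, HadoopMapReducePlan and HadoopAbstractMapReducePlanner now live under org.apache.ignite.hadoop, and the internal job abstraction is renamed to HadoopJobEx. As a rough illustration only, the deleted round-robin test planner above could be rewritten against the public package roughly as follows; this is a sketch, and the org.apache.ignite.hadoop locations of HadoopMapReducePlanner and HadoopJob, as well as the example package and class names, are assumptions not confirmed by this first half of the commit.

// Sketch only, not part of the commit. Assumed: org.apache.ignite.hadoop.HadoopMapReducePlanner
// and org.apache.ignite.hadoop.HadoopJob exist after the move; only HadoopInputSplit and
// HadoopMapReducePlan are confirmed relocated in this part of the diff.
package com.example.planner; // hypothetical user package

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.hadoop.HadoopInputSplit;        // relocated by this commit
import org.apache.ignite.hadoop.HadoopJob;               // assumed public counterpart of the renamed HadoopJobEx
import org.apache.ignite.hadoop.HadoopMapReducePlan;     // relocated by this commit
import org.apache.ignite.hadoop.HadoopMapReducePlanner;  // assumed location after the move
import org.apache.ignite.internal.processors.hadoop.planner.HadoopDefaultMapReducePlan;
import org.jetbrains.annotations.Nullable;

/**
 * Round-robin planner sketch: input splits are dealt to cluster nodes in turn,
 * and all reducers are assigned to a single node (mirrors the deleted test planner).
 */
public class ExampleRoundRobinMrPlanner implements HadoopMapReducePlanner {
    /** {@inheritDoc} */
    @Override public HadoopMapReducePlan preparePlan(HadoopJob job, Collection<ClusterNode> top,
        @Nullable HadoopMapReducePlan oldPlan) throws IgniteCheckedException {
        if (top.isEmpty())
            throw new IllegalArgumentException("Topology is empty");

        // Topology has at least one element here.
        Iterator<ClusterNode> it = top.iterator();

        Map<UUID, Collection<HadoopInputSplit>> mappers = new HashMap<>();

        // Deal splits to nodes in round-robin order, wrapping around the topology.
        for (HadoopInputSplit split : job.input()) {
            ClusterNode node = it.next();

            Collection<HadoopInputSplit> nodeSplits = mappers.get(node.id());

            if (nodeSplits == null) {
                nodeSplits = new ArrayList<>();

                mappers.put(node.id(), nodeSplits);
            }

            nodeSplits.add(split);

            if (!it.hasNext())
                it = top.iterator();
        }

        // Place all reducers on the next node in the rotation.
        int[] rdc = new int[job.info().reducers()];

        for (int i = 0; i < rdc.length; i++)
            rdc[i] = i;

        return new HadoopDefaultMapReducePlan(mappers, Collections.singletonMap(it.next().id(), rdc));
    }
}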