From: chesnay@apache.org
To: commits@flink.apache.org
Reply-To: dev@flink.apache.org
Date: Thu, 08 Dec 2016 12:54:55 -0000
Message-Id: <79a7a7348edd45c9bcdfe54b0845aad3@git.apache.org>
In-Reply-To: <518160d8fad14b9d996679bba83174e8@git.apache.org>
References: <518160d8fad14b9d996679bba83174e8@git.apache.org>
Mailing-List: contact commits-help@flink.apache.org; run by ezmlm
X-Mailer: ASF-Git Admin Mailer
Subject: [2/3] flink git commit: [FLINK-5164] Disable some Hadoop-compat tests on Windows

[FLINK-5164] Disable some Hadoop-compat tests on Windows

This closes #2889.

Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/fe843e13
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/fe843e13
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/fe843e13

Branch: refs/heads/master
Commit: fe843e1377aa08a10394bbfa67dc9d3b2a23b805
Parents: 4414008
Author: zentol
Authored: Fri Nov 25 14:58:48 2016 +0100
Committer: zentol
Committed: Thu Dec 8 12:04:48 2016 +0100

----------------------------------------------------------------------
 .../test/hadoopcompatibility/mapred/HadoopMapredITCase.java | 9 +++++++++
 .../mapreduce/HadoopInputOutputITCase.java                  | 8 ++++++++
 .../flink/test/hadoop/mapred/HadoopIOFormatsITCase.java     | 9 +++++++++
 .../flink/test/hadoop/mapred/WordCountMapredITCase.java     | 9 +++++++++
 .../test/hadoop/mapreduce/WordCountMapreduceITCase.java     | 9 +++++++++
 .../api/scala/hadoop/mapred/WordCountMapredITCase.scala     | 8 ++++++++
 .../scala/hadoop/mapreduce/WordCountMapreduceITCase.scala   | 8 ++++++++
 7 files changed, 60 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopMapredITCase.java
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopMapredITCase.java b/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopMapredITCase.java
index ccc0d82..0b5a366 100644
--- a/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopMapredITCase.java
+++ b/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopMapredITCase.java
@@ -21,12 +21,21 @@ package org.apache.flink.test.hadoopcompatibility.mapred;
 
 import org.apache.flink.test.hadoopcompatibility.mapred.example.HadoopMapredCompatWordCount;
 import org.apache.flink.test.testdata.WordCountData;
 import org.apache.flink.test.util.JavaProgramTestBase;
+import org.apache.flink.util.OperatingSystem;
+import org.junit.Assume;
+import org.junit.Before;
 
 public class HadoopMapredITCase extends JavaProgramTestBase {
 
 	protected String textPath;
 	protected String resultPath;
 
+	@Before
+	public void checkOperatingSystem() {
+		// FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+		Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows());
+	}
+
 	@Override
 	protected void preSubmit() throws Exception {
 		textPath = createTempFile("text.txt", WordCountData.TEXT);

http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapreduce/HadoopInputOutputITCase.java
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapreduce/HadoopInputOutputITCase.java b/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapreduce/HadoopInputOutputITCase.java
index 698e356..48aa258 100644
--- a/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapreduce/HadoopInputOutputITCase.java
+++ b/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapreduce/HadoopInputOutputITCase.java
@@ -21,12 +21,20 @@ package org.apache.flink.test.hadoopcompatibility.mapreduce;
 
 import org.apache.flink.test.hadoopcompatibility.mapreduce.example.WordCount;
 import org.apache.flink.test.testdata.WordCountData;
 import org.apache.flink.test.util.JavaProgramTestBase;
+import org.apache.flink.util.OperatingSystem;
+import org.junit.Assume;
+import org.junit.Before;
 
 public class HadoopInputOutputITCase extends JavaProgramTestBase {
 
 	protected String textPath;
 	protected String resultPath;
 
+	@Before
+	public void checkOperatingSystem() {
+		// FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+		Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows());
+	}
 	@Override
 	protected void preSubmit() throws Exception {

http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/HadoopIOFormatsITCase.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/HadoopIOFormatsITCase.java b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/HadoopIOFormatsITCase.java
index 0cb1ac5..468b780 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/HadoopIOFormatsITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/HadoopIOFormatsITCase.java
@@ -26,6 +26,7 @@ import org.apache.flink.configuration.Configuration;
 import org.apache.flink.api.java.hadoop.mapred.HadoopInputFormat;
 import org.apache.flink.test.util.JavaProgramTestBase;
 import org.apache.flink.test.util.TestBaseUtils;
+import org.apache.flink.util.OperatingSystem;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
@@ -35,6 +36,8 @@ import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.junit.Assume;
+import org.junit.Before;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
@@ -61,6 +64,12 @@ public class HadoopIOFormatsITCase extends JavaProgramTestBase {
 		super(config);
 	}
 
+	@Before
+	public void checkOperatingSystem() {
+		// FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+		Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows());
+	}
+
 	@Override
 	protected void preSubmit() throws Exception {
 		resultPath = new String[] {getTempDirPath("result0"), getTempDirPath("result1") };

http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/WordCountMapredITCase.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/WordCountMapredITCase.java b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/WordCountMapredITCase.java
index 80f311a..9528d94 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/WordCountMapredITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/WordCountMapredITCase.java
@@ -28,18 +28,27 @@ import static org.apache.flink.hadoopcompatibility.HadoopInputs.readHadoopFile;
 
 import org.apache.flink.test.testdata.WordCountData;
 import org.apache.flink.test.util.JavaProgramTestBase;
 import org.apache.flink.util.Collector;
+import org.apache.flink.util.OperatingSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
+import org.junit.Assume;
+import org.junit.Before;
 
 public class WordCountMapredITCase extends JavaProgramTestBase {
 
 	protected String textPath;
 	protected String resultPath;
 
+	@Before
+	public void checkOperatingSystem() {
+		// FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+		Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows());
+	}
+
 	@Override
 	protected void preSubmit() throws Exception {
 		textPath = createTempFile("text.txt", WordCountData.TEXT);

http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapreduce/WordCountMapreduceITCase.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapreduce/WordCountMapreduceITCase.java b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapreduce/WordCountMapreduceITCase.java
index 3293770..64062d2 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapreduce/WordCountMapreduceITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapreduce/WordCountMapreduceITCase.java
@@ -28,18 +28,27 @@ import static org.apache.flink.hadoopcompatibility.HadoopInputs.readHadoopFile;
 
 import org.apache.flink.test.testdata.WordCountData;
 import org.apache.flink.test.util.JavaProgramTestBase;
 import org.apache.flink.util.Collector;
+import org.apache.flink.util.OperatingSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.Assume;
+import org.junit.Before;
 
 public class WordCountMapreduceITCase extends JavaProgramTestBase {
 
 	protected String textPath;
 	protected String resultPath;
 
+	@Before
+	public void checkOperatingSystem() {
+		// FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+		Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows());
+	}
+
 	@Override
 	protected void preSubmit() throws Exception {
 		textPath = createTempFile("text.txt", WordCountData.TEXT);

http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapred/WordCountMapredITCase.scala
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapred/WordCountMapredITCase.scala b/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapred/WordCountMapredITCase.scala
index 6b414d6..9d04ca59 100644
--- a/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapred/WordCountMapredITCase.scala
+++ b/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapred/WordCountMapredITCase.scala
@@ -21,14 +21,22 @@ import org.apache.flink.api.scala._
 import org.apache.flink.hadoopcompatibility.scala.HadoopInputs
 import org.apache.flink.test.testdata.WordCountData
 import org.apache.flink.test.util.{JavaProgramTestBase, TestBaseUtils}
+import org.apache.flink.util.OperatingSystem
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.io.{LongWritable, Text}
 import org.apache.hadoop.mapred.{FileOutputFormat, JobConf, TextInputFormat, TextOutputFormat}
+import org.junit.{Assume, Before}
 
 class WordCountMapredITCase extends JavaProgramTestBase {
   protected var textPath: String = null
   protected var resultPath: String = null
 
+  @Before
+  def checkOperatingSystem() {
+    // FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+    Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows)
+  }
+
   protected override def preSubmit() {
     textPath = createTempFile("text.txt", WordCountData.TEXT)
     resultPath = getTempDirPath("result")

http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapreduce/WordCountMapreduceITCase.scala
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapreduce/WordCountMapreduceITCase.scala b/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapreduce/WordCountMapreduceITCase.scala
index e393d23..3b23a13 100644
--- a/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapreduce/WordCountMapreduceITCase.scala
+++ b/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapreduce/WordCountMapreduceITCase.scala
@@ -22,16 +22,24 @@ import org.apache.flink.api.scala._
 import org.apache.flink.hadoopcompatibility.scala.HadoopInputs
 import org.apache.flink.test.testdata.WordCountData
 import org.apache.flink.test.util.{TestBaseUtils, JavaProgramTestBase}
+import org.apache.flink.util.OperatingSystem
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.io.{Text, LongWritable}
 import org.apache.hadoop.mapreduce.Job
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat, TextOutputFormat}
+import org.junit.{Assume, Before}
 
 class WordCountMapreduceITCase extends JavaProgramTestBase {
   protected var textPath: String = null
   protected var resultPath: String = null
 
+  @Before
+  def checkOperatingSystem() {
+    // FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+    Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows)
+  }
+
   protected override def preSubmit() {
     textPath = createTempFile("text.txt", WordCountData.TEXT)
     resultPath = getTempDirPath("result")
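
For reference, every file in this commit applies the same JUnit 4 assumption-based skip: an @Before hook calls Assume.assumeTrue with the negated OperatingSystem.isWindows() check, so the test is reported as skipped rather than failed on Windows. Below is a minimal, self-contained sketch of that pattern; the class and test method names are hypothetical and not part of this commit, and it assumes flink-core and JUnit 4 are on the classpath.

    import org.apache.flink.util.OperatingSystem;
    import org.junit.Assume;
    import org.junit.Before;
    import org.junit.Test;

    // Hypothetical standalone example of the skip pattern used in FLINK-5164.
    public class WindowsSkipExampleTest {

        @Before
        public void checkOperatingSystem() {
            // Hadoop's local file system support needs native Windows binaries
            // (see https://wiki.apache.org/hadoop/WindowsProblems), so skip
            // instead of failing when running on Windows.
            Assume.assumeTrue(
                "This test can't run successfully on Windows.",
                !OperatingSystem.isWindows());
        }

        @Test
        public void runsOnlyOnNonWindowsSystems() {
            // This body executes only when the assumption above holds;
            // otherwise JUnit marks the test as ignored rather than failed.
        }
    }

Because a failed assumption marks the test as ignored, Windows builds stay green while the tests continue to run (and can still fail) on platforms where the Hadoop formats are supported.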