flink-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From rmetz...@apache.org
Subject [50/50] flink git commit: [FLINK-1712] Remove "flink-staging" module
Date Thu, 14 Jan 2016 16:16:47 GMT
[FLINK-1712] Remove "flink-staging" module

This closes #1492
This closes #1482


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/e9bf13d8
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/e9bf13d8
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/e9bf13d8

Branch: refs/heads/master
Commit: e9bf13d8626099a1d6ddb6ebe98c50be848fe79e
Parents: b7520a5
Author: Robert Metzger <rmetzger@apache.org>
Authored: Wed Jan 6 18:02:06 2016 +0100
Committer: Robert Metzger <rmetzger@apache.org>
Committed: Thu Jan 14 14:46:55 2016 +0100

----------------------------------------------------------------------
 .gitignore                                      |    2 +-
 docs/apis/hadoop_compatibility.md               |    2 +-
 docs/apis/streaming_guide.md                    |    2 +-
 docs/internals/general_arch.md                  |    4 +-
 docs/internals/stream_checkpointing.md          |    2 +-
 docs/setup/flink_on_tez.md                      |    7 +-
 flink-batch-connectors/flink-avro/pom.xml       |  205 ++++
 .../apache/flink/api/avro/DataInputDecoder.java |  213 ++++
 .../flink/api/avro/DataOutputEncoder.java       |  183 ++++
 .../api/avro/FSDataInputStreamWrapper.java      |   68 ++
 .../api/io/avro/example/AvroTypeExample.java    |  111 ++
 .../apache/flink/api/io/avro/example/User.java  |  269 +++++
 .../flink/api/java/io/AvroInputFormat.java      |  131 +++
 .../flink/api/java/io/AvroOutputFormat.java     |  100 ++
 .../src/test/assembly/test-assembly.xml         |   36 +
 .../api/avro/AvroExternalJarProgramITCase.java  |   88 ++
 .../flink/api/avro/AvroOutputFormatITCase.java  |  173 +++
 .../flink/api/avro/EncoderDecoderTest.java      |  528 +++++++++
 .../avro/testjar/AvroExternalJarProgram.java    |  219 ++++
 .../apache/flink/api/io/avro/AvroPojoTest.java  |  255 +++++
 .../api/io/avro/AvroRecordInputFormatTest.java  |  299 ++++++
 .../io/avro/AvroSplittableInputFormatTest.java  |  233 ++++
 .../io/AvroInputFormatTypeExtractionTest.java   |   79 ++
 .../src/test/resources/avro/user.avsc           |   35 +
 .../src/test/resources/log4j-test.properties    |   27 +
 .../src/test/resources/logback-test.xml         |   29 +
 .../flink-avro/src/test/resources/testdata.avro |  Bin 0 -> 4572 bytes
 .../flink-hadoop-compatibility/pom.xml          |   69 ++
 .../mapred/HadoopMapFunction.java               |  131 +++
 .../mapred/HadoopReduceCombineFunction.java     |  164 +++
 .../mapred/HadoopReduceFunction.java            |  140 +++
 .../example/HadoopMapredCompatWordCount.java    |  133 +++
 .../mapred/wrapper/HadoopOutputCollector.java   |   64 ++
 .../wrapper/HadoopTupleUnwrappingIterator.java  |   93 ++
 .../mapreduce/example/WordCount.java            |  120 +++
 .../mapred/HadoopMapFunctionITCase.java         |  182 ++++
 .../mapred/HadoopMapredITCase.java              |   47 +
 .../HadoopReduceCombineFunctionITCase.java      |  265 +++++
 .../mapred/HadoopReduceFunctionITCase.java      |  213 ++++
 .../mapred/HadoopTestData.java                  |   62 ++
 .../HadoopTupleUnwrappingIteratorTest.java      |  139 +++
 .../mapreduce/HadoopInputOutputITCase.java      |   47 +
 .../src/test/resources/log4j-test.properties    |   27 +
 .../src/test/resources/logback-test.xml         |   29 +
 flink-batch-connectors/flink-hbase/pom.xml      |  220 ++++
 .../flink/addons/hbase/TableInputFormat.java    |  256 +++++
 .../flink/addons/hbase/TableInputSplit.java     |   89 ++
 .../hbase/example/HBaseFlinkTestConstants.java  |   28 +
 .../addons/hbase/example/HBaseReadExample.java  |   92 ++
 .../addons/hbase/example/HBaseWriteExample.java |  202 ++++
 .../hbase/example/HBaseWriteStreamExample.java  |  118 ++
 .../src/test/resources/hbase-site.xml           |   43 +
 .../src/test/resources/log4j.properties         |   23 +
 flink-batch-connectors/flink-hcatalog/pom.xml   |  179 ++++
 .../flink/hcatalog/HCatInputFormatBase.java     |  410 +++++++
 .../flink/hcatalog/java/HCatInputFormat.java    |  160 +++
 .../flink/hcatalog/scala/HCatInputFormat.scala  |  229 ++++
 flink-batch-connectors/flink-jdbc/pom.xml       |   52 +
 .../flink/api/java/io/jdbc/JDBCInputFormat.java |  358 +++++++
 .../api/java/io/jdbc/JDBCOutputFormat.java      |  270 +++++
 .../api/java/io/jdbc/example/JDBCExample.java   |  101 ++
 .../api/java/io/jdbc/JDBCInputFormatTest.java   |  195 ++++
 .../api/java/io/jdbc/JDBCOutputFormatTest.java  |  240 +++++
 .../src/test/resources/log4j-test.properties    |   19 +
 .../src/test/resources/logback-test.xml         |   29 +
 flink-batch-connectors/pom.xml                  |   45 +
 flink-contrib/flink-tez/pom.xml                 |  224 ++++
 .../flink-tez/src/assembly/flink-fat-jar.xml    |   42 +
 .../flink/tez/client/LocalTezEnvironment.java   |   76 ++
 .../flink/tez/client/RemoteTezEnvironment.java  |   83 ++
 .../apache/flink/tez/client/TezExecutor.java    |  219 ++++
 .../flink/tez/client/TezExecutorTool.java       |   80 ++
 .../flink/tez/dag/FlinkBroadcastEdge.java       |   70 ++
 .../flink/tez/dag/FlinkDataSinkVertex.java      |   61 ++
 .../flink/tez/dag/FlinkDataSourceVertex.java    |   82 ++
 .../org/apache/flink/tez/dag/FlinkEdge.java     |   45 +
 .../apache/flink/tez/dag/FlinkForwardEdge.java  |   71 ++
 .../flink/tez/dag/FlinkPartitionEdge.java       |   71 ++
 .../flink/tez/dag/FlinkProcessorVertex.java     |   61 ++
 .../apache/flink/tez/dag/FlinkUnionVertex.java  |   61 ++
 .../org/apache/flink/tez/dag/FlinkVertex.java   |  114 ++
 .../apache/flink/tez/dag/TezDAGGenerator.java   |  460 ++++++++
 .../tez/examples/ConnectedComponentsStep.java   |  203 ++++
 .../flink/tez/examples/ExampleDriver.java       |  119 +++
 .../flink/tez/examples/PageRankBasicStep.java   |  241 +++++
 .../apache/flink/tez/examples/TPCHQuery3.java   |  224 ++++
 .../examples/TransitiveClosureNaiveStep.java    |  135 +++
 .../apache/flink/tez/examples/WordCount.java    |  129 +++
 .../flink/tez/runtime/DataSinkProcessor.java    |  228 ++++
 .../flink/tez/runtime/DataSourceProcessor.java  |  190 ++++
 .../flink/tez/runtime/RegularProcessor.java     |  138 +++
 .../tez/runtime/TezRuntimeEnvironment.java      |   44 +
 .../org/apache/flink/tez/runtime/TezTask.java   |  578 ++++++++++
 .../apache/flink/tez/runtime/TezTaskConfig.java |  163 +++
 .../flink/tez/runtime/UnionProcessor.java       |  106 ++
 .../flink/tez/runtime/input/FlinkInput.java     |  139 +++
 .../runtime/input/FlinkInputSplitGenerator.java |   94 ++
 .../tez/runtime/input/TezReaderIterator.java    |   66 ++
 .../tez/runtime/output/SimplePartitioner.java   |   35 +
 .../tez/runtime/output/TezChannelSelector.java  |   36 +
 .../tez/runtime/output/TezOutputCollector.java  |   72 ++
 .../tez/runtime/output/TezOutputEmitter.java    |  190 ++++
 .../apache/flink/tez/util/DummyInvokable.java   |   51 +
 .../apache/flink/tez/util/EncodingUtils.java    |   64 ++
 .../flink/tez/util/FlinkSerialization.java      |  310 ++++++
 .../src/main/resources/log4j.properties         |   30 +
 .../tez/test/ConnectedComponentsStepITCase.java |   83 ++
 .../flink/tez/test/PageRankBasicStepITCase.java |   54 +
 .../flink/tez/test/TezProgramTestBase.java      |  108 ++
 .../flink/tez/test/WebLogAnalysisITCase.java    |   48 +
 .../apache/flink/tez/test/WordCountITCase.java  |   47 +
 .../src/test/resources/log4j-test.properties    |   30 +
 .../src/test/resources/logback-test.xml         |   37 +
 flink-contrib/pom.xml                           |   16 +-
 flink-dist/src/main/assemblies/bin.xml          |   13 +-
 flink-examples/flink-examples-batch/pom.xml     |    6 +-
 flink-fs-tests/pom.xml                          |   97 ++
 .../flink/hdfstests/FileStateBackendTest.java   |  309 ++++++
 .../org/apache/flink/hdfstests/HDFSTest.java    |  188 ++++
 .../src/test/resources/log4j-test.properties    |   31 +
 .../src/test/resources/log4j.properties         |   31 +
 flink-libraries/flink-ml/README.md              |   22 +
 flink-libraries/flink-ml/pom.xml                |  162 +++
 .../scala/org/apache/flink/ml/MLUtils.scala     |  122 +++
 .../apache/flink/ml/classification/SVM.scala    |  550 ++++++++++
 .../org/apache/flink/ml/common/Block.scala      |   29 +
 .../apache/flink/ml/common/FlinkMLTools.scala   |  423 ++++++++
 .../apache/flink/ml/common/LabeledVector.scala  |   42 +
 .../apache/flink/ml/common/ParameterMap.scala   |  121 +++
 .../apache/flink/ml/common/WeightVector.scala   |   32 +
 .../apache/flink/ml/common/WithParameters.scala |   26 +
 .../scala/org/apache/flink/ml/math/BLAS.scala   |  291 +++++
 .../scala/org/apache/flink/ml/math/Breeze.scala |   88 ++
 .../flink/ml/math/BreezeVectorConverter.scala   |   34 +
 .../org/apache/flink/ml/math/DenseMatrix.scala  |  193 ++++
 .../org/apache/flink/ml/math/DenseVector.scala  |  184 ++++
 .../scala/org/apache/flink/ml/math/Matrix.scala |   69 ++
 .../org/apache/flink/ml/math/SparseMatrix.scala |  267 +++++
 .../org/apache/flink/ml/math/SparseVector.scala |  285 +++++
 .../scala/org/apache/flink/ml/math/Vector.scala |  104 ++
 .../apache/flink/ml/math/VectorBuilder.scala    |   57 +
 .../org/apache/flink/ml/math/package.scala      |  110 ++
 .../distances/ChebyshevDistanceMetric.scala     |   37 +
 .../distances/CosineDistanceMetric.scala        |   45 +
 .../ml/metrics/distances/DistanceMetric.scala   |   37 +
 .../distances/EuclideanDistanceMetric.scala     |   41 +
 .../distances/ManhattanDistanceMetric.scala     |   37 +
 .../distances/MinkowskiDistanceMetric.scala     |   41 +
 .../SquaredEuclideanDistanceMetric.scala        |   37 +
 .../distances/TanimotoDistanceMetric.scala      |   40 +
 .../flink/ml/optimization/GradientDescent.scala |  350 ++++++
 .../flink/ml/optimization/LossFunction.scala    |   96 ++
 .../ml/optimization/PartialLossFunction.scala   |   67 ++
 .../ml/optimization/PredictionFunction.scala    |   40 +
 .../apache/flink/ml/optimization/Solver.scala   |  152 +++
 .../scala/org/apache/flink/ml/package.scala     |  119 +++
 .../flink/ml/pipeline/ChainedPredictor.scala    |  139 +++
 .../flink/ml/pipeline/ChainedTransformer.scala  |  110 ++
 .../apache/flink/ml/pipeline/Estimator.scala    |  181 ++++
 .../apache/flink/ml/pipeline/Predictor.scala    |  258 +++++
 .../apache/flink/ml/pipeline/Transformer.scala  |  164 +++
 .../flink/ml/preprocessing/MinMaxScaler.scala   |  265 +++++
 .../ml/preprocessing/PolynomialFeatures.scala   |  209 ++++
 .../flink/ml/preprocessing/StandardScaler.scala |  302 ++++++
 .../apache/flink/ml/recommendation/ALS.scala    | 1009 ++++++++++++++++++
 .../regression/MultipleLinearRegression.scala   |  212 ++++
 .../src/test/resources/log4j-test.properties    |   38 +
 .../src/test/resources/logback-test.xml         |   42 +
 .../org/apache/flink/ml/MLUtilsSuite.scala      |  112 ++
 .../ml/classification/Classification.scala      |  133 +++
 .../flink/ml/classification/SVMITSuite.scala    |  104 ++
 .../flink/ml/common/FlinkMLToolsSuite.scala     |   60 ++
 .../apache/flink/ml/math/BreezeMathSuite.scala  |   98 ++
 .../apache/flink/ml/math/DenseMatrixSuite.scala |   86 ++
 .../apache/flink/ml/math/DenseVectorSuite.scala |  200 ++++
 .../flink/ml/math/SparseMatrixSuite.scala       |  134 +++
 .../flink/ml/math/SparseVectorSuite.scala       |  227 ++++
 .../metrics/distances/DistanceMetricSuite.scala |   95 ++
 .../optimization/GradientDescentITSuite.scala   |  245 +++++
 .../ml/optimization/LossFunctionITSuite.scala   |   51 +
 .../PredictionFunctionITSuite.scala             |   62 ++
 .../flink/ml/pipeline/PipelineITSuite.scala     |  211 ++++
 .../ml/preprocessing/MinMaxScalerITSuite.scala  |  243 +++++
 .../PolynomialFeaturesITSuite.scala             |  124 +++
 .../preprocessing/StandardScalerITSuite.scala   |  166 +++
 .../flink/ml/recommendation/ALSITSuite.scala    |   76 ++
 .../ml/recommendation/Recommendation.scala      |   90 ++
 .../MultipleLinearRegressionITSuite.scala       |  133 +++
 .../flink/ml/regression/RegressionData.scala    |  191 ++++
 flink-libraries/flink-table/pom.xml             |  258 +++++
 .../flink/api/java/table/package-info.java      |   60 ++
 .../apache/flink/api/table/package-info.java    |   33 +
 .../flink/examples/java/JavaTableExample.java   |   71 ++
 .../api/java/table/JavaBatchTranslator.scala    |  346 ++++++
 .../java/table/JavaStreamingTranslator.scala    |  241 +++++
 .../flink/api/java/table/TableEnvironment.scala |  111 ++
 .../api/scala/table/DataSetConversions.scala    |   67 ++
 .../api/scala/table/DataStreamConversions.scala |   68 ++
 .../api/scala/table/ScalaBatchTranslator.scala  |   68 ++
 .../scala/table/ScalaStreamingTranslator.scala  |   58 +
 .../api/scala/table/TableConversions.scala      |   47 +
 .../flink/api/scala/table/expressionDsl.scala   |  124 +++
 .../apache/flink/api/scala/table/package.scala  |  105 ++
 .../flink/api/table/ExpressionException.scala   |   23 +
 .../scala/org/apache/flink/api/table/Row.scala  |   38 +
 .../org/apache/flink/api/table/Table.scala      |  294 +++++
 .../apache/flink/api/table/TableConfig.scala    |   66 ++
 .../table/codegen/ExpressionCodeGenerator.scala |  829 ++++++++++++++
 .../api/table/codegen/GenerateFilter.scala      |   99 ++
 .../flink/api/table/codegen/GenerateJoin.scala  |  171 +++
 .../table/codegen/GenerateResultAssembler.scala |  119 +++
 .../api/table/codegen/GenerateSelect.scala      |   84 ++
 .../flink/api/table/codegen/Indenter.scala      |   54 +
 .../flink/api/table/codegen/package.scala       |   25 +
 .../api/table/expressions/Expression.scala      |   60 ++
 .../api/table/expressions/aggregations.scala    |   99 ++
 .../analysis/ExtractEquiJoinFields.scala        |   70 ++
 .../expressions/analysis/GroupByAnalyzer.scala  |   51 +
 .../expressions/analysis/InsertAutoCasts.scala  |   92 ++
 .../analysis/PredicateAnalyzer.scala            |   35 +
 .../analysis/ResolveFieldReferences.scala       |   60 ++
 .../analysis/SelectionAnalyzer.scala            |   36 +
 .../table/expressions/analysis/TypeCheck.scala  |   57 +
 .../expressions/analysis/VerifyBoolean.scala    |   41 +
 .../analysis/VerifyNoAggregates.scala           |   53 +
 .../analysis/VerifyNoNestedAggregates.scala     |   54 +
 .../api/table/expressions/arithmetic.scala      |  145 +++
 .../flink/api/table/expressions/cast.scala      |   34 +
 .../api/table/expressions/comparison.scala      |   93 ++
 .../api/table/expressions/fieldExpression.scala |   41 +
 .../flink/api/table/expressions/literals.scala  |   42 +
 .../flink/api/table/expressions/logic.scala     |   58 +
 .../flink/api/table/expressions/package.scala   |   29 +
 .../table/expressions/stringExpressions.scala   |   46 +
 .../org/apache/flink/api/table/package.scala    |   34 +
 .../api/table/parser/ExpressionParser.scala     |  238 +++++
 .../api/table/plan/ExpandAggregations.scala     |  147 +++
 .../flink/api/table/plan/PlanTranslator.scala   |  156 +++
 .../flink/api/table/plan/operations.scala       |  134 +++
 .../apache/flink/api/table/plan/package.scala   |   24 +
 .../runtime/ExpressionAggregateFunction.scala   |   89 ++
 .../runtime/ExpressionFilterFunction.scala      |   50 +
 .../table/runtime/ExpressionJoinFunction.scala  |   57 +
 .../runtime/ExpressionSelectFunction.scala      |   56 +
 .../flink/api/table/runtime/package.scala       |   23 +
 .../apache/flink/api/table/trees/Analyzer.scala |   43 +
 .../org/apache/flink/api/table/trees/Rule.scala |   30 +
 .../apache/flink/api/table/trees/TreeNode.scala |  120 +++
 .../api/table/typeinfo/RenameOperator.scala     |   36 +
 .../table/typeinfo/RenamingProxyTypeInfo.scala  |  124 +++
 .../api/table/typeinfo/RowSerializer.scala      |  137 +++
 .../flink/api/table/typeinfo/RowTypeInfo.scala  |   51 +
 .../flink/examples/scala/PageRankTable.scala    |  210 ++++
 .../examples/scala/StreamingTableFilter.scala   |   90 ++
 .../flink/examples/scala/TPCHQuery3Table.scala  |  174 +++
 .../flink/examples/scala/WordCountTable.scala   |   45 +
 .../scala/org/apache/flink/explain/Node.java    |  145 +++
 .../apache/flink/explain/PlanJsonParser.java    |  144 +++
 .../api/java/table/test/AggregationsITCase.java |  204 ++++
 .../flink/api/java/table/test/AsITCase.java     |  133 +++
 .../api/java/table/test/CastingITCase.java      |  171 +++
 .../api/java/table/test/ExpressionsITCase.java  |  165 +++
 .../flink/api/java/table/test/FilterITCase.java |  145 +++
 .../table/test/GroupedAggregationsITCase.java   |  126 +++
 .../flink/api/java/table/test/JoinITCase.java   |  182 ++++
 .../api/java/table/test/PojoGroupingITCase.java |   89 ++
 .../flink/api/java/table/test/SelectITCase.java |  148 +++
 .../api/java/table/test/SqlExplainITCase.java   |  206 ++++
 .../table/test/StringExpressionsITCase.java     |  122 +++
 .../flink/api/java/table/test/UnionITCase.java  |  140 +++
 .../scala/table/test/PageRankTableITCase.java   |  100 ++
 .../scala/table/test/TypeExceptionTest.scala    |   42 +
 .../scala/table/test/AggregationsITCase.scala   |  117 ++
 .../flink/api/scala/table/test/AsITCase.scala   |  102 ++
 .../api/scala/table/test/CastingITCase.scala    |  125 +++
 .../scala/table/test/ExpressionsITCase.scala    |  134 +++
 .../api/scala/table/test/FilterITCase.scala     |  132 +++
 .../table/test/GroupedAggreagationsITCase.scala |  115 ++
 .../flink/api/scala/table/test/JoinITCase.scala |  122 +++
 .../api/scala/table/test/SelectITCase.scala     |  122 +++
 .../api/scala/table/test/SqlExplainITCase.scala |   96 ++
 .../table/test/StringExpressionsITCase.scala    |   78 ++
 .../api/scala/table/test/UnionITCase.scala      |  100 ++
 .../typeinfo/RenamingProxyTypeInfoTest.scala    |   75 ++
 .../src/test/scala/resources/testFilter0.out    |   28 +
 .../src/test/scala/resources/testFilter1.out    |   96 ++
 .../src/test/scala/resources/testJoin0.out      |   39 +
 .../src/test/scala/resources/testJoin1.out      |  141 +++
 .../src/test/scala/resources/testUnion0.out     |   38 +
 .../src/test/scala/resources/testUnion1.out     |  140 +++
 flink-libraries/pom.xml                         |    2 +
 flink-scala-shell/pom.xml                       |  250 +++++
 .../org/apache/flink/api/java/JarHelper.java    |  214 ++++
 .../api/java/ScalaShellRemoteEnvironment.java   |  107 ++
 .../apache/flink/api/scala/ILoopCompat.scala    |   31 +
 .../apache/flink/api/scala/ILoopCompat.scala    |   37 +
 .../org/apache/flink/api/scala/FlinkILoop.scala |  232 ++++
 .../org/apache/flink/api/scala/FlinkShell.scala |  159 +++
 .../src/test/resources/log4j-test.properties    |   24 +
 .../src/test/resources/logback-test.xml         |   29 +
 .../flink/api/scala/ScalaShellITCase.scala      |  330 ++++++
 .../scala/ScalaShellLocalStartupITCase.scala    |   85 ++
 .../start-script/start-scala-shell.sh           |   86 ++
 flink-staging/flink-avro/pom.xml                |  205 ----
 .../apache/flink/api/avro/DataInputDecoder.java |  213 ----
 .../flink/api/avro/DataOutputEncoder.java       |  183 ----
 .../api/avro/FSDataInputStreamWrapper.java      |   68 --
 .../api/io/avro/example/AvroTypeExample.java    |  111 --
 .../apache/flink/api/io/avro/example/User.java  |  269 -----
 .../flink/api/java/io/AvroInputFormat.java      |  131 ---
 .../flink/api/java/io/AvroOutputFormat.java     |  100 --
 .../src/test/assembly/test-assembly.xml         |   36 -
 .../api/avro/AvroExternalJarProgramITCase.java  |   88 --
 .../flink/api/avro/AvroOutputFormatITCase.java  |  173 ---
 .../flink/api/avro/EncoderDecoderTest.java      |  528 ---------
 .../avro/testjar/AvroExternalJarProgram.java    |  219 ----
 .../apache/flink/api/io/avro/AvroPojoTest.java  |  255 -----
 .../api/io/avro/AvroRecordInputFormatTest.java  |  299 ------
 .../io/avro/AvroSplittableInputFormatTest.java  |  233 ----
 .../io/AvroInputFormatTypeExtractionTest.java   |   79 --
 .../src/test/resources/avro/user.avsc           |   35 -
 .../src/test/resources/log4j-test.properties    |   27 -
 .../src/test/resources/logback-test.xml         |   29 -
 .../flink-avro/src/test/resources/testdata.avro |  Bin 4572 -> 0 bytes
 flink-staging/flink-fs-tests/pom.xml            |   97 --
 .../flink/hdfstests/FileStateBackendTest.java   |  309 ------
 .../org/apache/flink/hdfstests/HDFSTest.java    |  188 ----
 .../src/test/resources/log4j-test.properties    |   31 -
 .../src/test/resources/log4j.properties         |   31 -
 .../flink-hadoop-compatibility/pom.xml          |   69 --
 .../mapred/HadoopMapFunction.java               |  131 ---
 .../mapred/HadoopReduceCombineFunction.java     |  164 ---
 .../mapred/HadoopReduceFunction.java            |  140 ---
 .../example/HadoopMapredCompatWordCount.java    |  133 ---
 .../mapred/wrapper/HadoopOutputCollector.java   |   64 --
 .../wrapper/HadoopTupleUnwrappingIterator.java  |   93 --
 .../mapreduce/example/WordCount.java            |  120 ---
 .../mapred/HadoopMapFunctionITCase.java         |  182 ----
 .../mapred/HadoopMapredITCase.java              |   47 -
 .../HadoopReduceCombineFunctionITCase.java      |  265 -----
 .../mapred/HadoopReduceFunctionITCase.java      |  213 ----
 .../mapred/HadoopTestData.java                  |   62 --
 .../HadoopTupleUnwrappingIteratorTest.java      |  139 ---
 .../mapreduce/HadoopInputOutputITCase.java      |   47 -
 .../src/test/resources/log4j-test.properties    |   27 -
 .../src/test/resources/logback-test.xml         |   29 -
 flink-staging/flink-hbase/pom.xml               |  220 ----
 .../flink/addons/hbase/TableInputFormat.java    |  256 -----
 .../flink/addons/hbase/TableInputSplit.java     |   89 --
 .../hbase/example/HBaseFlinkTestConstants.java  |   28 -
 .../addons/hbase/example/HBaseReadExample.java  |   92 --
 .../addons/hbase/example/HBaseWriteExample.java |  202 ----
 .../hbase/example/HBaseWriteStreamExample.java  |  118 --
 .../src/test/resources/hbase-site.xml           |   43 -
 .../src/test/resources/log4j.properties         |   23 -
 flink-staging/flink-hcatalog/pom.xml            |  179 ----
 .../flink/hcatalog/HCatInputFormatBase.java     |  410 -------
 .../flink/hcatalog/java/HCatInputFormat.java    |  160 ---
 .../flink/hcatalog/scala/HCatInputFormat.scala  |  229 ----
 flink-staging/flink-jdbc/pom.xml                |   52 -
 .../flink/api/java/io/jdbc/JDBCInputFormat.java |  358 -------
 .../api/java/io/jdbc/JDBCOutputFormat.java      |  270 -----
 .../api/java/io/jdbc/example/JDBCExample.java   |  101 --
 .../api/java/io/jdbc/JDBCInputFormatTest.java   |  195 ----
 .../api/java/io/jdbc/JDBCOutputFormatTest.java  |  240 -----
 .../src/test/resources/log4j-test.properties    |   19 -
 .../src/test/resources/logback-test.xml         |   29 -
 flink-staging/flink-ml/README.md                |   22 -
 flink-staging/flink-ml/pom.xml                  |  162 ---
 .../scala/org/apache/flink/ml/MLUtils.scala     |  122 ---
 .../apache/flink/ml/classification/SVM.scala    |  550 ----------
 .../org/apache/flink/ml/common/Block.scala      |   29 -
 .../apache/flink/ml/common/FlinkMLTools.scala   |  423 --------
 .../apache/flink/ml/common/LabeledVector.scala  |   42 -
 .../apache/flink/ml/common/ParameterMap.scala   |  121 ---
 .../apache/flink/ml/common/WeightVector.scala   |   32 -
 .../apache/flink/ml/common/WithParameters.scala |   26 -
 .../scala/org/apache/flink/ml/math/BLAS.scala   |  291 -----
 .../scala/org/apache/flink/ml/math/Breeze.scala |   88 --
 .../flink/ml/math/BreezeVectorConverter.scala   |   34 -
 .../org/apache/flink/ml/math/DenseMatrix.scala  |  193 ----
 .../org/apache/flink/ml/math/DenseVector.scala  |  184 ----
 .../scala/org/apache/flink/ml/math/Matrix.scala |   69 --
 .../org/apache/flink/ml/math/SparseMatrix.scala |  267 -----
 .../org/apache/flink/ml/math/SparseVector.scala |  285 -----
 .../scala/org/apache/flink/ml/math/Vector.scala |  104 --
 .../apache/flink/ml/math/VectorBuilder.scala    |   57 -
 .../org/apache/flink/ml/math/package.scala      |  110 --
 .../distances/ChebyshevDistanceMetric.scala     |   37 -
 .../distances/CosineDistanceMetric.scala        |   45 -
 .../ml/metrics/distances/DistanceMetric.scala   |   37 -
 .../distances/EuclideanDistanceMetric.scala     |   41 -
 .../distances/ManhattanDistanceMetric.scala     |   37 -
 .../distances/MinkowskiDistanceMetric.scala     |   41 -
 .../SquaredEuclideanDistanceMetric.scala        |   37 -
 .../distances/TanimotoDistanceMetric.scala      |   40 -
 .../flink/ml/optimization/GradientDescent.scala |  350 ------
 .../flink/ml/optimization/LossFunction.scala    |   96 --
 .../ml/optimization/PartialLossFunction.scala   |   67 --
 .../ml/optimization/PredictionFunction.scala    |   40 -
 .../apache/flink/ml/optimization/Solver.scala   |  152 ---
 .../scala/org/apache/flink/ml/package.scala     |  119 ---
 .../flink/ml/pipeline/ChainedPredictor.scala    |  139 ---
 .../flink/ml/pipeline/ChainedTransformer.scala  |  110 --
 .../apache/flink/ml/pipeline/Estimator.scala    |  181 ----
 .../apache/flink/ml/pipeline/Predictor.scala    |  258 -----
 .../apache/flink/ml/pipeline/Transformer.scala  |  164 ---
 .../flink/ml/preprocessing/MinMaxScaler.scala   |  265 -----
 .../ml/preprocessing/PolynomialFeatures.scala   |  209 ----
 .../flink/ml/preprocessing/StandardScaler.scala |  302 ------
 .../apache/flink/ml/recommendation/ALS.scala    | 1009 ------------------
 .../regression/MultipleLinearRegression.scala   |  212 ----
 .../src/test/resources/log4j-test.properties    |   38 -
 .../src/test/resources/logback-test.xml         |   42 -
 .../org/apache/flink/ml/MLUtilsSuite.scala      |  112 --
 .../ml/classification/Classification.scala      |  133 ---
 .../flink/ml/classification/SVMITSuite.scala    |  104 --
 .../flink/ml/common/FlinkMLToolsSuite.scala     |   60 --
 .../apache/flink/ml/math/BreezeMathSuite.scala  |   98 --
 .../apache/flink/ml/math/DenseMatrixSuite.scala |   86 --
 .../apache/flink/ml/math/DenseVectorSuite.scala |  200 ----
 .../flink/ml/math/SparseMatrixSuite.scala       |  134 ---
 .../flink/ml/math/SparseVectorSuite.scala       |  227 ----
 .../metrics/distances/DistanceMetricSuite.scala |   95 --
 .../optimization/GradientDescentITSuite.scala   |  245 -----
 .../ml/optimization/LossFunctionITSuite.scala   |   51 -
 .../PredictionFunctionITSuite.scala             |   62 --
 .../flink/ml/pipeline/PipelineITSuite.scala     |  211 ----
 .../ml/preprocessing/MinMaxScalerITSuite.scala  |  243 -----
 .../PolynomialFeaturesITSuite.scala             |  124 ---
 .../preprocessing/StandardScalerITSuite.scala   |  166 ---
 .../flink/ml/recommendation/ALSITSuite.scala    |   76 --
 .../ml/recommendation/Recommendation.scala      |   90 --
 .../MultipleLinearRegressionITSuite.scala       |  133 ---
 .../flink/ml/regression/RegressionData.scala    |  191 ----
 flink-staging/flink-scala-shell/pom.xml         |  250 -----
 .../org/apache/flink/api/java/JarHelper.java    |  214 ----
 .../api/java/ScalaShellRemoteEnvironment.java   |  107 --
 .../apache/flink/api/scala/ILoopCompat.scala    |   31 -
 .../apache/flink/api/scala/ILoopCompat.scala    |   37 -
 .../org/apache/flink/api/scala/FlinkILoop.scala |  232 ----
 .../org/apache/flink/api/scala/FlinkShell.scala |  159 ---
 .../src/test/resources/log4j-test.properties    |   24 -
 .../src/test/resources/logback-test.xml         |   29 -
 .../flink/api/scala/ScalaShellITCase.scala      |  330 ------
 .../scala/ScalaShellLocalStartupITCase.scala    |   85 --
 .../start-script/start-scala-shell.sh           |   86 --
 flink-staging/flink-table/pom.xml               |  258 -----
 .../flink/api/java/table/package-info.java      |   60 --
 .../apache/flink/api/table/package-info.java    |   33 -
 .../flink/examples/java/JavaTableExample.java   |   71 --
 .../api/java/table/JavaBatchTranslator.scala    |  346 ------
 .../java/table/JavaStreamingTranslator.scala    |  241 -----
 .../flink/api/java/table/TableEnvironment.scala |  111 --
 .../api/scala/table/DataSetConversions.scala    |   67 --
 .../api/scala/table/DataStreamConversions.scala |   68 --
 .../api/scala/table/ScalaBatchTranslator.scala  |   68 --
 .../scala/table/ScalaStreamingTranslator.scala  |   58 -
 .../api/scala/table/TableConversions.scala      |   47 -
 .../flink/api/scala/table/expressionDsl.scala   |  124 ---
 .../apache/flink/api/scala/table/package.scala  |  105 --
 .../flink/api/table/ExpressionException.scala   |   23 -
 .../scala/org/apache/flink/api/table/Row.scala  |   38 -
 .../org/apache/flink/api/table/Table.scala      |  294 -----
 .../apache/flink/api/table/TableConfig.scala    |   66 --
 .../table/codegen/ExpressionCodeGenerator.scala |  829 --------------
 .../api/table/codegen/GenerateFilter.scala      |   99 --
 .../flink/api/table/codegen/GenerateJoin.scala  |  171 ---
 .../table/codegen/GenerateResultAssembler.scala |  119 ---
 .../api/table/codegen/GenerateSelect.scala      |   84 --
 .../flink/api/table/codegen/Indenter.scala      |   54 -
 .../flink/api/table/codegen/package.scala       |   25 -
 .../apache/flink/api/table/explain/Node.java    |  145 ---
 .../flink/api/table/explain/PlanJsonParser.java |  144 ---
 .../api/table/expressions/Expression.scala      |   60 --
 .../api/table/expressions/aggregations.scala    |   99 --
 .../analysis/ExtractEquiJoinFields.scala        |   70 --
 .../expressions/analysis/GroupByAnalyzer.scala  |   51 -
 .../expressions/analysis/InsertAutoCasts.scala  |   92 --
 .../analysis/PredicateAnalyzer.scala            |   35 -
 .../analysis/ResolveFieldReferences.scala       |   60 --
 .../analysis/SelectionAnalyzer.scala            |   36 -
 .../table/expressions/analysis/TypeCheck.scala  |   57 -
 .../expressions/analysis/VerifyBoolean.scala    |   41 -
 .../analysis/VerifyNoAggregates.scala           |   53 -
 .../analysis/VerifyNoNestedAggregates.scala     |   54 -
 .../api/table/expressions/arithmetic.scala      |  145 ---
 .../flink/api/table/expressions/cast.scala      |   34 -
 .../api/table/expressions/comparison.scala      |   93 --
 .../api/table/expressions/fieldExpression.scala |   41 -
 .../flink/api/table/expressions/literals.scala  |   42 -
 .../flink/api/table/expressions/logic.scala     |   58 -
 .../flink/api/table/expressions/package.scala   |   29 -
 .../table/expressions/stringExpressions.scala   |   46 -
 .../org/apache/flink/api/table/package.scala    |   34 -
 .../api/table/parser/ExpressionParser.scala     |  238 -----
 .../api/table/plan/ExpandAggregations.scala     |  147 ---
 .../flink/api/table/plan/PlanTranslator.scala   |  156 ---
 .../flink/api/table/plan/operations.scala       |  134 ---
 .../apache/flink/api/table/plan/package.scala   |   24 -
 .../runtime/ExpressionAggregateFunction.scala   |   89 --
 .../runtime/ExpressionFilterFunction.scala      |   50 -
 .../table/runtime/ExpressionJoinFunction.scala  |   57 -
 .../runtime/ExpressionSelectFunction.scala      |   56 -
 .../flink/api/table/runtime/package.scala       |   23 -
 .../apache/flink/api/table/trees/Analyzer.scala |   43 -
 .../org/apache/flink/api/table/trees/Rule.scala |   30 -
 .../apache/flink/api/table/trees/TreeNode.scala |  120 ---
 .../api/table/typeinfo/RenameOperator.scala     |   36 -
 .../table/typeinfo/RenamingProxyTypeInfo.scala  |  124 ---
 .../api/table/typeinfo/RowSerializer.scala      |  137 ---
 .../flink/api/table/typeinfo/RowTypeInfo.scala  |   51 -
 .../flink/examples/scala/PageRankTable.scala    |  210 ----
 .../examples/scala/StreamingTableFilter.scala   |   90 --
 .../flink/examples/scala/TPCHQuery3Table.scala  |  174 ---
 .../flink/examples/scala/WordCountTable.scala   |   45 -
 .../api/java/table/test/AggregationsITCase.java |  204 ----
 .../flink/api/java/table/test/AsITCase.java     |  133 ---
 .../api/java/table/test/CastingITCase.java      |  171 ---
 .../api/java/table/test/ExpressionsITCase.java  |  165 ---
 .../flink/api/java/table/test/FilterITCase.java |  145 ---
 .../table/test/GroupedAggregationsITCase.java   |  126 ---
 .../flink/api/java/table/test/JoinITCase.java   |  182 ----
 .../api/java/table/test/PojoGroupingITCase.java |   89 --
 .../flink/api/java/table/test/SelectITCase.java |  148 ---
 .../api/java/table/test/SqlExplainITCase.java   |  206 ----
 .../table/test/StringExpressionsITCase.java     |  122 ---
 .../flink/api/java/table/test/UnionITCase.java  |  140 ---
 .../scala/table/test/PageRankTableITCase.java   |  100 --
 .../scala/table/test/TypeExceptionTest.scala    |   42 -
 .../scala/table/test/AggregationsITCase.scala   |  117 --
 .../flink/api/scala/table/test/AsITCase.scala   |  102 --
 .../api/scala/table/test/CastingITCase.scala    |  125 ---
 .../scala/table/test/ExpressionsITCase.scala    |  134 ---
 .../api/scala/table/test/FilterITCase.scala     |  132 ---
 .../table/test/GroupedAggreagationsITCase.scala |  115 --
 .../flink/api/scala/table/test/JoinITCase.scala |  122 ---
 .../api/scala/table/test/SelectITCase.scala     |  122 ---
 .../api/scala/table/test/SqlExplainITCase.scala |   96 --
 .../table/test/StringExpressionsITCase.scala    |   78 --
 .../api/scala/table/test/UnionITCase.scala      |  100 --
 .../typeinfo/RenamingProxyTypeInfoTest.scala    |   75 --
 .../src/test/scala/resources/testFilter0.out    |   28 -
 .../src/test/scala/resources/testFilter1.out    |   96 --
 .../src/test/scala/resources/testJoin0.out      |   39 -
 .../src/test/scala/resources/testJoin1.out      |  141 ---
 .../src/test/scala/resources/testUnion0.out     |   38 -
 .../src/test/scala/resources/testUnion1.out     |  140 ---
 flink-staging/flink-tez/pom.xml                 |  224 ----
 .../flink-tez/src/assembly/flink-fat-jar.xml    |   42 -
 .../flink/tez/client/LocalTezEnvironment.java   |   76 --
 .../flink/tez/client/RemoteTezEnvironment.java  |   83 --
 .../apache/flink/tez/client/TezExecutor.java    |  219 ----
 .../flink/tez/client/TezExecutorTool.java       |   80 --
 .../flink/tez/dag/FlinkBroadcastEdge.java       |   70 --
 .../flink/tez/dag/FlinkDataSinkVertex.java      |   61 --
 .../flink/tez/dag/FlinkDataSourceVertex.java    |   82 --
 .../org/apache/flink/tez/dag/FlinkEdge.java     |   45 -
 .../apache/flink/tez/dag/FlinkForwardEdge.java  |   71 --
 .../flink/tez/dag/FlinkPartitionEdge.java       |   71 --
 .../flink/tez/dag/FlinkProcessorVertex.java     |   61 --
 .../apache/flink/tez/dag/FlinkUnionVertex.java  |   61 --
 .../org/apache/flink/tez/dag/FlinkVertex.java   |  114 --
 .../apache/flink/tez/dag/TezDAGGenerator.java   |  460 --------
 .../tez/examples/ConnectedComponentsStep.java   |  203 ----
 .../flink/tez/examples/ExampleDriver.java       |  119 ---
 .../flink/tez/examples/PageRankBasicStep.java   |  241 -----
 .../apache/flink/tez/examples/TPCHQuery3.java   |  224 ----
 .../examples/TransitiveClosureNaiveStep.java    |  135 ---
 .../apache/flink/tez/examples/WordCount.java    |  129 ---
 .../flink/tez/runtime/DataSinkProcessor.java    |  228 ----
 .../flink/tez/runtime/DataSourceProcessor.java  |  190 ----
 .../flink/tez/runtime/RegularProcessor.java     |  138 ---
 .../tez/runtime/TezRuntimeEnvironment.java      |   44 -
 .../org/apache/flink/tez/runtime/TezTask.java   |  578 ----------
 .../apache/flink/tez/runtime/TezTaskConfig.java |  163 ---
 .../flink/tez/runtime/UnionProcessor.java       |  106 --
 .../flink/tez/runtime/input/FlinkInput.java     |  139 ---
 .../runtime/input/FlinkInputSplitGenerator.java |   94 --
 .../tez/runtime/input/TezReaderIterator.java    |   66 --
 .../tez/runtime/output/SimplePartitioner.java   |   35 -
 .../tez/runtime/output/TezChannelSelector.java  |   36 -
 .../tez/runtime/output/TezOutputCollector.java  |   72 --
 .../tez/runtime/output/TezOutputEmitter.java    |  190 ----
 .../apache/flink/tez/util/DummyInvokable.java   |   51 -
 .../apache/flink/tez/util/EncodingUtils.java    |   64 --
 .../flink/tez/util/FlinkSerialization.java      |  310 ------
 .../src/main/resources/log4j.properties         |   30 -
 .../tez/test/ConnectedComponentsStepITCase.java |   83 --
 .../flink/tez/test/PageRankBasicStepITCase.java |   54 -
 .../flink/tez/test/TezProgramTestBase.java      |  108 --
 .../flink/tez/test/WebLogAnalysisITCase.java    |   48 -
 .../apache/flink/tez/test/WordCountITCase.java  |   47 -
 .../src/test/resources/log4j-test.properties    |   30 -
 .../src/test/resources/logback-test.xml         |   37 -
 flink-staging/pom.xml                           |   72 --
 pom.xml                                         |   15 +-
 597 files changed, 37559 insertions(+), 37581 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 638f71f..8e30de7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,7 +19,7 @@ tmp
 _site
 docs/api
 build-target
-flink-staging/flink-avro/src/test/java/org/apache/flink/api/io/avro/generated/
+flink-batch-connectors/flink-avro/src/test/java/org/apache/flink/api/io/avro/generated/
 flink-runtime-web/web-dashboard/node_modules/
 flink-runtime-web/web-dashboard/bower_components/
 atlassian-ide-plugin.xml

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/docs/apis/hadoop_compatibility.md
----------------------------------------------------------------------
diff --git a/docs/apis/hadoop_compatibility.md b/docs/apis/hadoop_compatibility.md
index d88dc0b..aca1edf 100644
--- a/docs/apis/hadoop_compatibility.md
+++ b/docs/apis/hadoop_compatibility.md
@@ -46,7 +46,7 @@ The code is located in `org.apache.flink.api.java.hadoop` and
 `org.apache.flink.api.scala.hadoop` in an additional sub-package for the
 `mapred` and `mapreduce` API.
 
-Support for Hadoop Mappers and Reducers is contained in the `flink-staging`
+Support for Hadoop Mappers and Reducers is contained in the `flink-hadoop-compatibility`
 Maven module.
 This code resides in the `org.apache.flink.hadoopcompatibility`
 package.

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/docs/apis/streaming_guide.md
----------------------------------------------------------------------
diff --git a/docs/apis/streaming_guide.md b/docs/apis/streaming_guide.md
index de7d46e..ba7a7f8 100644
--- a/docs/apis/streaming_guide.md
+++ b/docs/apis/streaming_guide.md
@@ -3916,7 +3916,7 @@ After installing Docker an image can be pulled for each connector. Containers ca
 For the easiest setup, create a jar with all the dependencies of the *flink-streaming-connectors* project.
 
 ~~~bash
-cd /PATH/TO/GIT/flink/flink-staging/flink-streaming-connectors
+cd /PATH/TO/GIT/flink/flink-streaming-connectors
 mvn assembly:assembly
 ~~~bash
 

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/docs/internals/general_arch.md
----------------------------------------------------------------------
diff --git a/docs/internals/general_arch.md b/docs/internals/general_arch.md
index 4628e0b..a81ae85 100644
--- a/docs/internals/general_arch.md
+++ b/docs/internals/general_arch.md
@@ -97,9 +97,7 @@ In addition to the projects listed in the figure above, Flink currently contains
 - `flink-dist`: The *distribution* project. It defines how to assemble the compiled code, scripts, and other resources
 into the final folder structure that is ready to use.
 
-- `flink-staging`: A series of projects that are in an early version. Currently contains
-among other things projects for YARN support, JDBC data sources and sinks, hadoop compatibility,
-graph specific operators, and HBase connectors.
+- `flink-contrib`: A series of projects that are in an early version.
 
 - `flink-quickstart`: Scripts, maven archetypes, and example programs for the quickstarts and tutorials.
 

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/docs/internals/stream_checkpointing.md
----------------------------------------------------------------------
diff --git a/docs/internals/stream_checkpointing.md b/docs/internals/stream_checkpointing.md
index 48355a1..dbfd341 100644
--- a/docs/internals/stream_checkpointing.md
+++ b/docs/internals/stream_checkpointing.md
@@ -141,7 +141,7 @@ It is possible to let an operator continue processing while it stores its state
 After receiving the checkpoint barriers on its inputs, the operator starts the asynchronous snapshot copying of its state. It immediately emits the barrier to its outputs and continues with the regular stream processing. Once the background copy process has completed, it acknowledges the checkpoint to the checkpoint coordinator (the JobManager). The checkpoint is now only complete after all sinks received the barriers and all stateful operators acknowledged their completed backup (which may be later than the barriers reaching the sinks).
 
 User-defined state that is used through the key/value state abstraction can be snapshotted *asynchronously*.
-User functions that implement the interface {% gh_link /flink-staging/flink-streaming/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/checkpoint/Checkpointed.java "Checkpointed" %} will be snapshotted *synchronously*, while functions that implement {% gh_link /flink-staging/flink-streaming/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/checkpoint/CheckpointedAsynchronously.java "CheckpointedAsynchronously" %} will be snapshotted *asynchronously*. Note that for the latter, the user function must guarantee that any future modifications to its state to not affect the state object returned by the `snapshotState()` method.
+User functions that implement the interface {% gh_link /flink-streaming-java/src/main/java/org/apache/flink/streaming/api/checkpoint/Checkpointed.java "Checkpointed" %} will be snapshotted *synchronously*, while functions that implement {% gh_link /flink-streaming-java/src/main/java/org/apache/flink/streaming/api/checkpoint/CheckpointedAsynchronously.java "CheckpointedAsynchronously" %} will be snapshotted *asynchronously*. Note that for the latter, the user function must guarantee that any future modifications to its state to not affect the state object returned by the `snapshotState()` method.
 
 
 

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/docs/setup/flink_on_tez.md
----------------------------------------------------------------------
diff --git a/docs/setup/flink_on_tez.md b/docs/setup/flink_on_tez.md
index afbd147..daa0d7d 100644
--- a/docs/setup/flink_on_tez.md
+++ b/docs/setup/flink_on_tez.md
@@ -23,8 +23,9 @@ under the License.
 <a href="#top"></a>
 
 You can run Flink using Tez as an execution environment. Flink on Tez 
-is currently included in *flink-staging* in alpha. All classes are
-located in the *org.apache.flink.tez* package.
+is currently included in the *flink-contrib* module, which means it is 
+in alpha stability. All classes are located in the *org.apache.flink.tez* 
+package.
 
 * This will be replaced by the TOC
 {:toc}
@@ -101,7 +102,7 @@ public class WordCountExample {
   using `mvn -DskipTests clean package -Pinclude-tez -Dhadoop.version=X.X.X -Dtez.version=X.X.X`.
   Make sure that the Hadoop version matches the version that Tez uses.
   Obtain the jar file contained in the Flink distribution under
-  `flink-staging/flink-tez/target/flink-tez-x.y.z-flink-fat-jar.jar` 
+  `flink-contrib/flink-tez/target/flink-tez-x.y.z-flink-fat-jar.jar` 
   and upload it to some directory in HDFS. E.g., to upload the file
   to the directory `/apps`, execute
   {% highlight bash %}

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/flink-batch-connectors/flink-avro/pom.xml
----------------------------------------------------------------------
diff --git a/flink-batch-connectors/flink-avro/pom.xml b/flink-batch-connectors/flink-avro/pom.xml
new file mode 100644
index 0000000..e9c6499
--- /dev/null
+++ b/flink-batch-connectors/flink-avro/pom.xml
@@ -0,0 +1,205 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+	
+	<modelVersion>4.0.0</modelVersion>
+	
+	<parent>
+		<groupId>org.apache.flink</groupId>
+		<artifactId>flink-batch-connectors</artifactId>
+		<version>1.0-SNAPSHOT</version>
+		<relativePath>..</relativePath>
+	</parent>
+
+	<artifactId>flink-avro</artifactId>
+	<name>flink-avro</name>
+
+	<packaging>jar</packaging>
+
+	<dependencies>
+		<dependency>
+			<groupId>org.apache.flink</groupId>
+			<artifactId>flink-java</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+
+
+		<dependency>
+			<groupId>org.apache.flink</groupId>
+			<artifactId>flink-clients</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+
+		<dependency>
+			<groupId>org.apache.avro</groupId>
+			<artifactId>avro</artifactId>
+			<!-- version is derived from base module -->
+		</dependency>
+
+		<dependency>
+			<groupId>org.apache.flink</groupId>
+			<artifactId>flink-core</artifactId>
+			<version>${project.version}</version>
+			<type>test-jar</type>
+			<scope>test</scope>
+		</dependency>
+		
+		<dependency>
+			<groupId>org.apache.flink</groupId>
+			<artifactId>flink-test-utils</artifactId>
+			<version>${project.version}</version>
+			<scope>test</scope>
+		</dependency>
+		
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<artifactId>maven-assembly-plugin</artifactId>
+				<executions>
+					<execution>
+						<id>create-test-dependency</id>
+						<phase>process-test-classes</phase>
+						<goals>
+							<goal>single</goal>
+						</goals>
+						<configuration>
+							<archive>
+								<manifest>
+									<mainClass>org.apache.flink.api.avro.testjar.AvroExternalJarProgram</mainClass>
+								</manifest>
+							</archive>
+							<finalName>maven</finalName>
+							<attach>false</attach>
+							<descriptors>
+								<descriptor>src/test/assembly/test-assembly.xml</descriptor>
+							</descriptors>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<!--Remove the AvroExternalJarProgram code from the test-classes directory since it mustn't be in the
+			classpath when running the tests to actually test whether the user code class loader
+			is properly used.-->
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version><!--$NO-MVN-MAN-VER$-->
+				<executions>
+					<execution>
+						<id>remove-avroexternalprogram</id>
+						<phase>process-test-classes</phase>
+						<goals>
+							<goal>clean</goal>
+						</goals>
+						<configuration>
+							<excludeDefaultDirectories>true</excludeDefaultDirectories>
+							<filesets>
+								<fileset>
+									<directory>${project.build.testOutputDirectory}</directory>
+									<includes>
+										<include>**/testjar/*.class</include>
+									</includes>
+								</fileset>
+							</filesets>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<!-- Generate Test class from avro schema -->
+			<plugin>
+				<groupId>org.apache.avro</groupId>
+				<artifactId>avro-maven-plugin</artifactId>
+				<version>1.7.7</version>
+				<executions>
+					<execution>
+						<phase>generate-sources</phase>
+						<goals>
+							<goal>schema</goal>
+						</goals>
+						<configuration>
+							<testSourceDirectory>${project.basedir}/src/test/resources/avro</testSourceDirectory>
+							<testOutputDirectory>${project.basedir}/src/test/java/</testOutputDirectory>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+		
+		<pluginManagement>
+			<plugins>
+				<!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+				<plugin>
+					<groupId>org.eclipse.m2e</groupId>
+					<artifactId>lifecycle-mapping</artifactId>
+					<version>1.0.0</version>
+					<configuration>
+						<lifecycleMappingMetadata>
+							<pluginExecutions>
+								<pluginExecution>
+									<pluginExecutionFilter>
+										<groupId>org.apache.maven.plugins</groupId>
+										<artifactId>maven-assembly-plugin</artifactId>
+										<versionRange>[2.4,)</versionRange>
+										<goals>
+											<goal>single</goal>
+										</goals>
+									</pluginExecutionFilter>
+									<action>
+										<ignore/>
+									</action>
+								</pluginExecution>
+								<pluginExecution>
+									<pluginExecutionFilter>
+										<groupId>org.apache.maven.plugins</groupId>
+										<artifactId>maven-clean-plugin</artifactId>
+										<versionRange>[1,)</versionRange>
+										<goals>
+											<goal>clean</goal>
+										</goals>
+									</pluginExecutionFilter>
+									<action>
+										<ignore/>
+									</action>
+								</pluginExecution>
+								<pluginExecution>
+									<pluginExecutionFilter>
+										<groupId>org.apache.avro</groupId>
+										<artifactId>avro-maven-plugin</artifactId>
+										<versionRange>[1.7.7,)</versionRange>
+										<goals>
+											<goal>schema</goal>
+										</goals>
+									</pluginExecutionFilter>
+									<action>
+										<ignore/>
+									</action>
+								</pluginExecution>
+							</pluginExecutions>
+						</lifecycleMappingMetadata>
+					</configuration>
+				</plugin>
+			</plugins>
+		</pluginManagement>
+	</build>
+
+</project>

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/DataInputDecoder.java
----------------------------------------------------------------------
diff --git a/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/DataInputDecoder.java b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/DataInputDecoder.java
new file mode 100644
index 0000000..59da4cb
--- /dev/null
+++ b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/DataInputDecoder.java
@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.api.avro;
+
+import java.io.DataInput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.io.Decoder;
+import org.apache.avro.util.Utf8;
+
+
+public class DataInputDecoder extends Decoder {
+	
+	private final Utf8 stringDecoder = new Utf8();
+	
+	private DataInput in;
+	
+	public void setIn(DataInput in) {
+		this.in = in;
+	}
+
+	// --------------------------------------------------------------------------------------------
+	// primitives
+	// --------------------------------------------------------------------------------------------
+	
+	@Override
+	public void readNull() {}
+	
+
+	@Override
+	public boolean readBoolean() throws IOException {
+		return in.readBoolean();
+	}
+
+	@Override
+	public int readInt() throws IOException {
+		return in.readInt();
+	}
+
+	@Override
+	public long readLong() throws IOException {
+		return in.readLong();
+	}
+
+	@Override
+	public float readFloat() throws IOException {
+		return in.readFloat();
+	}
+
+	@Override
+	public double readDouble() throws IOException {
+		return in.readDouble();
+	}
+	
+	@Override
+	public int readEnum() throws IOException {
+		return readInt();
+	}
+	
+	// --------------------------------------------------------------------------------------------
+	// bytes
+	// --------------------------------------------------------------------------------------------
+
+	@Override
+	public void readFixed(byte[] bytes, int start, int length) throws IOException {
+		in.readFully(bytes, start, length);
+	}
+	
+	@Override
+	public ByteBuffer readBytes(ByteBuffer old) throws IOException {
+		int length = readInt();
+		ByteBuffer result;
+		if (old != null && length <= old.capacity() && old.hasArray()) {
+			result = old;
+			result.clear();
+		} else {
+			result = ByteBuffer.allocate(length);
+		}
+		in.readFully(result.array(), result.arrayOffset() + result.position(), length);
+		result.limit(length);
+		return result;
+	}
+	
+	
+	@Override
+	public void skipFixed(int length) throws IOException {
+		skipBytes(length);
+	}
+	
+	@Override
+	public void skipBytes() throws IOException {
+		int num = readInt();
+		skipBytes(num);
+	}
+	
+	// --------------------------------------------------------------------------------------------
+	// strings
+	// --------------------------------------------------------------------------------------------
+	
+	
+	@Override
+	public Utf8 readString(Utf8 old) throws IOException {
+		int length = readInt();
+		Utf8 result = (old != null ? old : new Utf8());
+		result.setByteLength(length);
+		
+		if (length > 0) {
+			in.readFully(result.getBytes(), 0, length);
+		}
+		
+		return result;
+	}
+
+	@Override
+	public String readString() throws IOException {
+		return readString(stringDecoder).toString();
+	}
+
+	@Override
+	public void skipString() throws IOException {
+		int len = readInt();
+		skipBytes(len);
+	}
+
+	// --------------------------------------------------------------------------------------------
+	// collection types
+	// --------------------------------------------------------------------------------------------
+
+	@Override
+	public long readArrayStart() throws IOException {
+		return readVarLongCount(in);
+	}
+
+	@Override
+	public long arrayNext() throws IOException {
+		return readVarLongCount(in);
+	}
+
+	@Override
+	public long skipArray() throws IOException {
+		return readVarLongCount(in);
+	}
+
+	@Override
+	public long readMapStart() throws IOException {
+		return readVarLongCount(in);
+	}
+
+	@Override
+	public long mapNext() throws IOException {
+		return readVarLongCount(in);
+	}
+
+	@Override
+	public long skipMap() throws IOException {
+		return readVarLongCount(in);
+	}
+	
+	// --------------------------------------------------------------------------------------------
+	// union
+	// --------------------------------------------------------------------------------------------
+
+	@Override
+	public int readIndex() throws IOException {
+		return readInt();
+	}
+	
+	// --------------------------------------------------------------------------------------------
+	// utils
+	// --------------------------------------------------------------------------------------------
+	
+	private void skipBytes(int num) throws IOException {
+		while (num > 0) {
+			num -= in.skipBytes(num);
+		}
+	}
+	
+	public static long readVarLongCount(DataInput in) throws IOException {
+		long value = in.readUnsignedByte();
+
+		if ((value & 0x80) == 0) {
+			return value;
+		}
+		else {
+			long curr;
+			int shift = 7;
+			value = value & 0x7f;
+			while (((curr = in.readUnsignedByte()) & 0x80) != 0){
+				value |= (curr & 0x7f) << shift;
+				shift += 7;
+			}
+			value |= curr << shift;
+			return value;
+		}
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/DataOutputEncoder.java
----------------------------------------------------------------------
diff --git a/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/DataOutputEncoder.java b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/DataOutputEncoder.java
new file mode 100644
index 0000000..0102cc1
--- /dev/null
+++ b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/DataOutputEncoder.java
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.api.avro;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.io.Encoder;
+import org.apache.avro.util.Utf8;
+
+
+public final class DataOutputEncoder extends Encoder implements java.io.Serializable {
+	
+	private static final long serialVersionUID = 1L;
+	
+	private DataOutput out;
+	
+	
+	public void setOut(DataOutput out) {
+		this.out = out;
+	}
+
+
+	@Override
+	public void flush() throws IOException {}
+
+	// --------------------------------------------------------------------------------------------
+	// primitives
+	// --------------------------------------------------------------------------------------------
+	
+	@Override
+	public void writeNull() {}
+	
+
+	@Override
+	public void writeBoolean(boolean b) throws IOException {
+		out.writeBoolean(b);
+	}
+
+	@Override
+	public void writeInt(int n) throws IOException {
+		out.writeInt(n);
+	}
+
+	@Override
+	public void writeLong(long n) throws IOException {
+		out.writeLong(n);
+	}
+
+	@Override
+	public void writeFloat(float f) throws IOException {
+		out.writeFloat(f);
+	}
+
+	@Override
+	public void writeDouble(double d) throws IOException {
+		out.writeDouble(d);
+	}
+	
+	@Override
+	public void writeEnum(int e) throws IOException {
+		out.writeInt(e);
+	}
+	
+	
+	// --------------------------------------------------------------------------------------------
+	// bytes
+	// --------------------------------------------------------------------------------------------
+
+	@Override
+	public void writeFixed(byte[] bytes, int start, int len) throws IOException {
+		out.write(bytes, start, len);
+	}
+	
+	@Override
+	public void writeBytes(byte[] bytes, int start, int len) throws IOException {
+		out.writeInt(len);
+		if (len > 0) {
+			out.write(bytes, start, len);
+		}
+	}
+	
+	@Override
+	public void writeBytes(ByteBuffer bytes) throws IOException {
+		int num = bytes.remaining();
+		out.writeInt(num);
+		
+		if (num > 0) {
+			writeFixed(bytes);
+		}
+	}
+	
+	// --------------------------------------------------------------------------------------------
+	// strings
+	// --------------------------------------------------------------------------------------------
+
+	@Override
+	public void writeString(String str) throws IOException {
+		byte[] bytes = Utf8.getBytesFor(str);
+		writeBytes(bytes, 0, bytes.length);
+	}
+	
+	@Override
+	public void writeString(Utf8 utf8) throws IOException {
+		writeBytes(utf8.getBytes(), 0, utf8.getByteLength());
+		
+	}
+
+	// --------------------------------------------------------------------------------------------
+	// collection types
+	// --------------------------------------------------------------------------------------------
+
+	@Override
+	public void writeArrayStart() {}
+
+	@Override
+	public void setItemCount(long itemCount) throws IOException {
+		if (itemCount > 0) {
+			writeVarLongCount(out, itemCount);
+		}
+	}
+
+	@Override
+	public void startItem() {}
+
+	@Override
+	public void writeArrayEnd() throws IOException {
+		// write a single byte 0, shortcut for a var-length long of 0
+		out.write(0);
+	}
+
+	@Override
+	public void writeMapStart() {}
+
+	@Override
+	public void writeMapEnd() throws IOException {
+		// write a single byte 0, shortcut for a var-length long of 0
+		out.write(0);
+	}
+
+	// --------------------------------------------------------------------------------------------
+	// union
+	// --------------------------------------------------------------------------------------------
+	
+	@Override
+	public void writeIndex(int unionIndex) throws IOException {
+		out.writeInt(unionIndex);
+	}
+	
+	// --------------------------------------------------------------------------------------------
+	// utils
+	// --------------------------------------------------------------------------------------------
+		
+	
+	public static void writeVarLongCount(DataOutput out, long val) throws IOException {
+		if (val < 0) {
+			throw new IOException("Illegal count (must be non-negative): " + val);
+		}
+		
+		while ((val & ~0x7FL) != 0) {
+			out.write(((int) val) | 0x80);
+			val >>>= 7;
+		}
+		out.write((int) val);
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/FSDataInputStreamWrapper.java
----------------------------------------------------------------------
diff --git a/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/FSDataInputStreamWrapper.java b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/FSDataInputStreamWrapper.java
new file mode 100644
index 0000000..709c4f1
--- /dev/null
+++ b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/avro/FSDataInputStreamWrapper.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.flink.api.avro;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+import org.apache.avro.file.SeekableInput;
+import org.apache.flink.core.fs.FSDataInputStream;
+
+
+/**
+ * Adapts a Flink {@link FSDataInputStream} to Avro's {@link SeekableInput}.
+ *
+ * <p>Code adapted from org.apache.avro.mapred.FSInput (which is Apache licensed as well).
+ * The wrapper keeps track of the position in the data stream.
+ */
+public class FSDataInputStreamWrapper implements Closeable, SeekableInput {
+	private final FSDataInputStream stream;
+	// current position in the stream; advanced by read(), reset by seek()
+	private long pos;
+	// total stream length in bytes, as supplied by the caller
+	private long len;
+
+	public FSDataInputStreamWrapper(FSDataInputStream stream, long len) {
+		this.stream = stream;
+		this.pos = 0;
+		this.len = len;
+	}
+
+	/** Returns the total length of the stream, as passed to the constructor. */
+	public long length() throws IOException {
+		return this.len;
+	}
+
+	public int read(byte[] b, int off, int len) throws IOException {
+		int read;
+		read = stream.read(b, off, len);
+		// NOTE(review): if the underlying read() returns -1 at end of stream,
+		// pos is decremented by one here — confirm callers stop reading at EOF.
+		pos += read;
+		return read;
+	}
+
+	public void seek(long p) throws IOException {
+		stream.seek(p);
+		pos = p;
+	}
+
+	/** Returns the current read position. */
+	public long tell() throws IOException {
+		return pos;
+	}
+
+	public void close() throws IOException {
+		stream.close();
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/io/avro/example/AvroTypeExample.java
----------------------------------------------------------------------
diff --git a/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/io/avro/example/AvroTypeExample.java b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/io/avro/example/AvroTypeExample.java
new file mode 100644
index 0000000..6affeec
--- /dev/null
+++ b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/io/avro/example/AvroTypeExample.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.api.io.avro.example;
+
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.flink.api.common.functions.GroupReduceFunction;
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.common.io.GenericInputFormat;
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.ExecutionEnvironment;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.util.Collector;
+
+/**
+ * Example program showing Avro {@link User} objects flowing through a Flink
+ * batch job: generate users, key them by favorite number, and concatenate the
+ * favorite colors seen for each number.
+ */
+@SuppressWarnings("serial")
+public class AvroTypeExample {
+	
+	
+	public static void main(String[] args) throws Exception {
+		
+		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
+		
+		// NUM pseudo-random users from the generating input format below
+		DataSet<User> users = env.createInput(new UserGeneratingInputFormat());
+		
+		users
+			.map(new NumberExtractingMapper())
+			.groupBy(1)
+			.reduceGroup(new ConcatenatingReducer())
+			.print();
+		
+		env.execute();
+	}
+	
+	
+	
+	/** Pairs each user with its favorite number so the job can group on field 1. */
+	public static final class NumberExtractingMapper implements MapFunction<User, Tuple2<User, Integer>> {
+		
+		@Override
+		public Tuple2<User, Integer> map(User user) {
+			return new Tuple2<User, Integer>(user, user.getFavoriteNumber());
+		}
+	}
+	
+	
+	/** Concatenates the favorite colors of all users sharing a favorite number. */
+	public static final class ConcatenatingReducer implements GroupReduceFunction<Tuple2<User, Integer>, Tuple2<Integer, String>> {
+
+		@Override
+		public void reduce(Iterable<Tuple2<User, Integer>> values, Collector<Tuple2<Integer, String>> out) throws Exception {
+			int number = 0;
+			StringBuilder colors = new StringBuilder();
+			
+			for (Tuple2<User, Integer> u : values) {
+				number = u.f1;
+				colors.append(u.f0.getFavoriteColor()).append(" - ");
+			}
+			
+			// drop the trailing " - "; groups handed to reduce() contain at
+			// least one element, so at least one separator was appended
+			colors.setLength(colors.length() - 3);
+			out.collect(new Tuple2<Integer, String>(number, colors.toString()));
+		}
+	}
+	
+	
+	/** Generates {@code NUM} users with a fixed seed for reproducible runs. */
+	public static final class UserGeneratingInputFormat extends GenericInputFormat<User> {
+
+		private static final long serialVersionUID = 1L;
+		
+		// number of records to generate
+		private static final int NUM = 100;
+		
+		// fixed seed keeps the generated data deterministic across runs
+		private final Random rnd = new Random(32498562304986L);
+		
+		private static final String[] NAMES = { "Peter", "Bob", "Liddy", "Alexander", "Stan" };
+		
+		private static final String[] COLORS = { "mauve", "crimson", "copper", "sky", "grass" };
+		
+		// records emitted so far
+		private int count;
+		
+
+		@Override
+		public boolean reachedEnd() throws IOException {
+			return count >= NUM;
+		}
+
+		@Override
+		public User nextRecord(User reuse) throws IOException {
+			count++;
+			
+			User u = new User();
+			u.setName(NAMES[rnd.nextInt(NAMES.length)]);
+			u.setFavoriteColor(COLORS[rnd.nextInt(COLORS.length)]);
+			u.setFavoriteNumber(rnd.nextInt(87));
+			return u;
+		}
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/io/avro/example/User.java
----------------------------------------------------------------------
diff --git a/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/io/avro/example/User.java b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/io/avro/example/User.java
new file mode 100644
index 0000000..4608f96
--- /dev/null
+++ b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/io/avro/example/User.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/**
+ * Autogenerated by Avro
+ * 
+ * DO NOT EDIT DIRECTLY
+ */
+package org.apache.flink.api.io.avro.example;  
+// NOTE(review): Avro-generated file — do not hand-edit. The schema namespace
+// in SCHEMA$ is "org.apache.flink.api.avro.example" while the Java package is
+// "org.apache.flink.api.io.avro.example"; confirm this mismatch is intentional.
+@SuppressWarnings("all")
+@org.apache.avro.specific.AvroGenerated
+public class User extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"User\",\"namespace\":\"org.apache.flink.api.avro.example\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"favorite_number\",\"type\":[\"int\",\"null\"]},{\"name\":\"favorite_color\",\"type\":[\"string\",\"null\"]}]}");
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+  @Deprecated public java.lang.CharSequence name;
+  @Deprecated public java.lang.Integer favorite_number;
+  @Deprecated public java.lang.CharSequence favorite_color;
+
+  /**
+   * Default constructor.  Note that this does not initialize fields
+   * to their default values from the schema.  If that is desired then
+   * one should use {@link #newBuilder()}. 
+   */
+  public User() {}
+
+  /**
+   * All-args constructor.
+   */
+  public User(java.lang.CharSequence name, java.lang.Integer favorite_number, java.lang.CharSequence favorite_color) {
+    this.name = name;
+    this.favorite_number = favorite_number;
+    this.favorite_color = favorite_color;
+  }
+
+  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+  // Used by DatumWriter.  Applications should not call. 
+  public java.lang.Object get(int field$) {
+    switch (field$) {
+    case 0: return name;
+    case 1: return favorite_number;
+    case 2: return favorite_color;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+  // Used by DatumReader.  Applications should not call. 
+  @SuppressWarnings(value="unchecked")
+  public void put(int field$, java.lang.Object value$) {
+    switch (field$) {
+    case 0: name = (java.lang.CharSequence)value$; break;
+    case 1: favorite_number = (java.lang.Integer)value$; break;
+    case 2: favorite_color = (java.lang.CharSequence)value$; break;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+
+  /**
+   * Gets the value of the 'name' field.
+   */
+  public java.lang.CharSequence getName() {
+    return name;
+  }
+
+  /**
+   * Sets the value of the 'name' field.
+   * @param value the value to set.
+   */
+  public void setName(java.lang.CharSequence value) {
+    this.name = value;
+  }
+
+  /**
+   * Gets the value of the 'favorite_number' field.
+   */
+  public java.lang.Integer getFavoriteNumber() {
+    return favorite_number;
+  }
+
+  /**
+   * Sets the value of the 'favorite_number' field.
+   * @param value the value to set.
+   */
+  public void setFavoriteNumber(java.lang.Integer value) {
+    this.favorite_number = value;
+  }
+
+  /**
+   * Gets the value of the 'favorite_color' field.
+   */
+  public java.lang.CharSequence getFavoriteColor() {
+    return favorite_color;
+  }
+
+  /**
+   * Sets the value of the 'favorite_color' field.
+   * @param value the value to set.
+   */
+  public void setFavoriteColor(java.lang.CharSequence value) {
+    this.favorite_color = value;
+  }
+
+  /** Creates a new User RecordBuilder */
+  public static org.apache.flink.api.io.avro.example.User.Builder newBuilder() {
+    return new org.apache.flink.api.io.avro.example.User.Builder();
+  }
+  
+  /** Creates a new User RecordBuilder by copying an existing Builder */
+  public static org.apache.flink.api.io.avro.example.User.Builder newBuilder(org.apache.flink.api.io.avro.example.User.Builder other) {
+    return new org.apache.flink.api.io.avro.example.User.Builder(other);
+  }
+  
+  /** Creates a new User RecordBuilder by copying an existing User instance */
+  public static org.apache.flink.api.io.avro.example.User.Builder newBuilder(org.apache.flink.api.io.avro.example.User other) {
+    return new org.apache.flink.api.io.avro.example.User.Builder(other);
+  }
+  
+  /**
+   * RecordBuilder for User instances.
+   */
+  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<User>
+    implements org.apache.avro.data.RecordBuilder<User> {
+
+    private java.lang.CharSequence name;
+    private java.lang.Integer favorite_number;
+    private java.lang.CharSequence favorite_color;
+
+    /** Creates a new Builder */
+    private Builder() {
+      super(org.apache.flink.api.io.avro.example.User.SCHEMA$);
+    }
+    
+    /** Creates a Builder by copying an existing Builder */
+    private Builder(org.apache.flink.api.io.avro.example.User.Builder other) {
+      super(other);
+      if (isValidValue(fields()[0], other.name)) {
+        this.name = data().deepCopy(fields()[0].schema(), other.name);
+        fieldSetFlags()[0] = true;
+      }
+      if (isValidValue(fields()[1], other.favorite_number)) {
+        this.favorite_number = data().deepCopy(fields()[1].schema(), other.favorite_number);
+        fieldSetFlags()[1] = true;
+      }
+      if (isValidValue(fields()[2], other.favorite_color)) {
+        this.favorite_color = data().deepCopy(fields()[2].schema(), other.favorite_color);
+        fieldSetFlags()[2] = true;
+      }
+    }
+    
+    /** Creates a Builder by copying an existing User instance */
+    private Builder(org.apache.flink.api.io.avro.example.User other) {
+            super(org.apache.flink.api.io.avro.example.User.SCHEMA$);
+      if (isValidValue(fields()[0], other.name)) {
+        this.name = data().deepCopy(fields()[0].schema(), other.name);
+        fieldSetFlags()[0] = true;
+      }
+      if (isValidValue(fields()[1], other.favorite_number)) {
+        this.favorite_number = data().deepCopy(fields()[1].schema(), other.favorite_number);
+        fieldSetFlags()[1] = true;
+      }
+      if (isValidValue(fields()[2], other.favorite_color)) {
+        this.favorite_color = data().deepCopy(fields()[2].schema(), other.favorite_color);
+        fieldSetFlags()[2] = true;
+      }
+    }
+
+    /** Gets the value of the 'name' field */
+    public java.lang.CharSequence getName() {
+      return name;
+    }
+    
+    /** Sets the value of the 'name' field */
+    public org.apache.flink.api.io.avro.example.User.Builder setName(java.lang.CharSequence value) {
+      validate(fields()[0], value);
+      this.name = value;
+      fieldSetFlags()[0] = true;
+      return this; 
+    }
+    
+    /** Checks whether the 'name' field has been set */
+    public boolean hasName() {
+      return fieldSetFlags()[0];
+    }
+    
+    /** Clears the value of the 'name' field */
+    public org.apache.flink.api.io.avro.example.User.Builder clearName() {
+      name = null;
+      fieldSetFlags()[0] = false;
+      return this;
+    }
+
+    /** Gets the value of the 'favorite_number' field */
+    public java.lang.Integer getFavoriteNumber() {
+      return favorite_number;
+    }
+    
+    /** Sets the value of the 'favorite_number' field */
+    public org.apache.flink.api.io.avro.example.User.Builder setFavoriteNumber(java.lang.Integer value) {
+      validate(fields()[1], value);
+      this.favorite_number = value;
+      fieldSetFlags()[1] = true;
+      return this; 
+    }
+    
+    /** Checks whether the 'favorite_number' field has been set */
+    public boolean hasFavoriteNumber() {
+      return fieldSetFlags()[1];
+    }
+    
+    /** Clears the value of the 'favorite_number' field */
+    public org.apache.flink.api.io.avro.example.User.Builder clearFavoriteNumber() {
+      favorite_number = null;
+      fieldSetFlags()[1] = false;
+      return this;
+    }
+
+    /** Gets the value of the 'favorite_color' field */
+    public java.lang.CharSequence getFavoriteColor() {
+      return favorite_color;
+    }
+    
+    /** Sets the value of the 'favorite_color' field */
+    public org.apache.flink.api.io.avro.example.User.Builder setFavoriteColor(java.lang.CharSequence value) {
+      validate(fields()[2], value);
+      this.favorite_color = value;
+      fieldSetFlags()[2] = true;
+      return this; 
+    }
+    
+    /** Checks whether the 'favorite_color' field has been set */
+    public boolean hasFavoriteColor() {
+      return fieldSetFlags()[2];
+    }
+    
+    /** Clears the value of the 'favorite_color' field */
+    public org.apache.flink.api.io.avro.example.User.Builder clearFavoriteColor() {
+      favorite_color = null;
+      fieldSetFlags()[2] = false;
+      return this;
+    }
+
+    @Override
+    public User build() {
+      try {
+        User record = new User();
+        record.name = fieldSetFlags()[0] ? this.name : (java.lang.CharSequence) defaultValue(fields()[0]);
+        record.favorite_number = fieldSetFlags()[1] ? this.favorite_number : (java.lang.Integer) defaultValue(fields()[1]);
+        record.favorite_color = fieldSetFlags()[2] ? this.favorite_color : (java.lang.CharSequence) defaultValue(fields()[2]);
+        return record;
+      } catch (Exception e) {
+        throw new org.apache.avro.AvroRuntimeException(e);
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/java/io/AvroInputFormat.java
----------------------------------------------------------------------
diff --git a/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/java/io/AvroInputFormat.java b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/java/io/AvroInputFormat.java
new file mode 100644
index 0000000..09fcacb
--- /dev/null
+++ b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/java/io/AvroInputFormat.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.flink.api.java.io;
+
+import java.io.IOException;
+
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.file.SeekableInput;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.flink.api.avro.FSDataInputStreamWrapper;
+import org.apache.flink.api.common.io.FileInputFormat;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.java.typeutils.ResultTypeQueryable;
+import org.apache.flink.api.java.typeutils.TypeExtractor;
+import org.apache.flink.core.fs.FileInputSplit;
+import org.apache.flink.core.fs.Path;
+import org.apache.flink.util.InstantiationUtil;
+
+
+/**
+ * Input format that reads Avro container files and deserializes each record
+ * into an instance of {@code E}: specific-record deserialization when
+ * {@code E} is an Avro specific record, reflect-based otherwise.
+ */
+public class AvroInputFormat<E> extends FileInputFormat<E> implements ResultTypeQueryable<E> {
+	
+	private static final long serialVersionUID = 1L;
+
+	private static final Logger LOG = LoggerFactory.getLogger(AvroInputFormat.class);
+
+	// type of the records produced by this format
+	private final Class<E> avroValueType;
+	
+	// if true (default), the same Avro object instance is reused per record
+	private boolean reuseAvroValue = true;
+
+	private transient FileReader<E> dataFileReader;
+
+	// exclusive end offset of the current split
+	private transient long end;
+
+	
+	public AvroInputFormat(Path filePath, Class<E> type) {
+		super(filePath);
+		this.avroValueType = type;
+	}
+	
+	
+	/**
+	 * Sets the flag whether to reuse the Avro value instance for all records.
+	 * By default, the input format reuses the Avro value.
+	 *
+	 * @param reuseAvroValue True, if the input format should reuse the Avro value instance, false otherwise.
+	 */
+	public void setReuseAvroValue(boolean reuseAvroValue) {
+		this.reuseAvroValue = reuseAvroValue;
+	}
+
+	/**
+	 * If set, the InputFormat will only read entire files.
+	 */
+	public void setUnsplittable(boolean unsplittable) {
+		this.unsplittable = unsplittable;
+	}
+	
+	// --------------------------------------------------------------------------------------------
+	// Typing
+	// --------------------------------------------------------------------------------------------
+	
+	@Override
+	public TypeInformation<E> getProducedType() {
+		return TypeExtractor.getForClass(this.avroValueType);
+	}
+	
+	// --------------------------------------------------------------------------------------------
+	// Input Format Methods
+	// --------------------------------------------------------------------------------------------
+
+	@Override
+	public void open(FileInputSplit split) throws IOException {
+		super.open(split);
+
+		// specific-record deserialization when possible, reflection otherwise
+		DatumReader<E> datumReader;
+		if (org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroValueType)) {
+			datumReader = new SpecificDatumReader<E>(avroValueType);
+		} else {
+			datumReader = new ReflectDatumReader<E>(avroValueType);
+		}
+		
+		LOG.info("Opening split " + split);
+		
+		SeekableInput in = new FSDataInputStreamWrapper(stream, split.getPath().getFileSystem().getFileStatus(split.getPath()).getLen());
+
+		// NOTE(review): dataFileReader is never closed explicitly; the wrapped
+		// stream is closed by the superclass — confirm no reader state leaks.
+		dataFileReader = DataFileReader.openReader(in, datumReader);
+		// skip to the first sync point at or after the split's start offset
+		dataFileReader.sync(split.getStart());
+		this.end = split.getStart() + split.getLength();
+	}
+
+	@Override
+	public boolean reachedEnd() throws IOException {
+		// done when no records remain or the reader has passed the split's end
+		return !dataFileReader.hasNext() || dataFileReader.pastSync(end);
+	}
+
+	@Override
+	public E nextRecord(E reuseValue) throws IOException {
+		if (reachedEnd()) {
+			return null;
+		}
+		
+		// when reuse is disabled, hand the reader a fresh instance instead
+		if (!reuseAvroValue) {
+			reuseValue = InstantiationUtil.instantiate(avroValueType, Object.class);
+		}
+		
+		reuseValue = dataFileReader.next(reuseValue);
+		return reuseValue;
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/java/io/AvroOutputFormat.java
----------------------------------------------------------------------
diff --git a/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/java/io/AvroOutputFormat.java b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/java/io/AvroOutputFormat.java
new file mode 100644
index 0000000..d00dbf7
--- /dev/null
+++ b/flink-batch-connectors/flink-avro/src/main/java/org/apache/flink/api/java/io/AvroOutputFormat.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.flink.api.java.io;
+
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.avro.specific.SpecificDatumWriter;
+import org.apache.flink.api.common.io.FileOutputFormat;
+import org.apache.flink.core.fs.Path;
+
+import java.io.IOException;
+
+/**
+ * Output format that writes records into an Avro container file: specific-record
+ * serialization when {@code E} is an Avro specific record, reflect-based
+ * otherwise. A schema set via {@link #setSchema(Schema)} takes precedence
+ * over the schema derived from the value type.
+ */
+public class AvroOutputFormat<E> extends FileOutputFormat<E> {
+
+	private static final long serialVersionUID = 1L;
+
+	private final Class<E> avroValueType;
+
+	// optional schema override; when null, the schema is derived from the type
+	private Schema userDefinedSchema = null;
+
+	private transient DataFileWriter<E> dataFileWriter;
+
+	public AvroOutputFormat(Path filePath, Class<E> type) {
+		super(filePath);
+		this.avroValueType = type;
+	}
+
+	public AvroOutputFormat(Class<E> type) {
+		this.avroValueType = type;
+	}
+
+	@Override
+	protected String getDirectoryFileName(int taskNumber) {
+		return super.getDirectoryFileName(taskNumber) + ".avro";
+	}
+
+	/** Sets a schema to use instead of the one derived from the value type. */
+	public void setSchema(Schema schema) {
+		this.userDefinedSchema = schema;
+	}
+
+	@Override
+	public void writeRecord(E record) throws IOException {
+		dataFileWriter.append(record);
+	}
+
+	@Override
+	public void open(int taskNumber, int numTasks) throws IOException {
+		super.open(taskNumber, numTasks);
+
+		DatumWriter<E> datumWriter;
+		Schema schema = null;
+		if (org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroValueType)) {
+			datumWriter = new SpecificDatumWriter<E>(avroValueType);
+			try {
+				// a throwaway instance is created only to obtain the generated schema
+				schema = ((org.apache.avro.specific.SpecificRecordBase)avroValueType.newInstance()).getSchema();
+			} catch (InstantiationException e) {
+				// NOTE(review): only the message is kept, the cause is dropped —
+				// consider new RuntimeException(e) to preserve the stack trace.
+				throw new RuntimeException(e.getMessage());
+			} catch (IllegalAccessException e) {
+				throw new RuntimeException(e.getMessage());
+			}
+		} else {
+			datumWriter = new ReflectDatumWriter<E>(avroValueType);
+			schema = ReflectData.get().getSchema(avroValueType);
+		}
+		dataFileWriter = new DataFileWriter<E>(datumWriter);
+		// the user-defined schema, when present, overrides the derived one
+		if (userDefinedSchema == null) {
+			dataFileWriter.create(schema, stream);
+		} else {
+			dataFileWriter.create(userDefinedSchema, stream);
+		}
+	}
+
+	@Override
+	public void close() throws IOException {
+		dataFileWriter.flush();
+		dataFileWriter.close();
+		super.close();
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/e9bf13d8/flink-batch-connectors/flink-avro/src/test/assembly/test-assembly.xml
----------------------------------------------------------------------
diff --git a/flink-batch-connectors/flink-avro/src/test/assembly/test-assembly.xml b/flink-batch-connectors/flink-avro/src/test/assembly/test-assembly.xml
new file mode 100644
index 0000000..0f4561a
--- /dev/null
+++ b/flink-batch-connectors/flink-avro/src/test/assembly/test-assembly.xml
@@ -0,0 +1,36 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<assembly>
+	<id>test-jar</id>
+	<formats>
+		<format>jar</format>
+	</formats>
+	<includeBaseDirectory>false</includeBaseDirectory>
+	<fileSets>
+		<fileSet>
+			<directory>${project.build.testOutputDirectory}</directory>
+			<outputDirectory>/</outputDirectory>
+			<!--modify/add include to match your package(s) -->
+			<!-- only classes under the avro testjar package are bundled;
+			     presumably consumed by AvroExternalJarProgramITCase — verify -->
+			<includes>
+				<include>org/apache/flink/api/avro/testjar/**</include>
+			</includes>
+		</fileSet>
+	</fileSets>
+</assembly>
\ No newline at end of file


Mime
View raw message