From: pnowojski
To: issues@flink.apache.org
Reply-To: issues@flink.apache.org
Subject: [GitHub] flink pull request #6343: [FLINK-9852] [table] Expose descriptor-based sink ...
Date: Tue, 17 Jul 2018 10:12:40 +0000 (UTC)

Github user pnowojski commented on a diff in the pull request:

    https://github.com/apache/flink/pull/6343#discussion_r202963234

    --- Diff: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/catalog/ExternalTableUtil.scala ---
    @@ -0,0 +1,148 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements. See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership. The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License. You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.flink.table.catalog
    +
    +import java.util
    +
    +import org.apache.flink.table.api._
    +import org.apache.flink.table.descriptors.DescriptorProperties
    +import org.apache.flink.table.factories._
    +import org.apache.flink.table.plan.schema._
    +import org.apache.flink.table.plan.stats.FlinkStatistic
    +import org.apache.flink.table.util.Logging
    +
    +
    +/**
    +  * The utility class is used to convert [[ExternalCatalogTable]] to [[TableSourceSinkTable]].
    +  *
    +  * It uses [[TableFactoryService]] for discovering.
    +  */
    +object ExternalTableUtil extends Logging {
    +
    +  /**
    +    * Converts an [[ExternalCatalogTable]] instance to a [[TableSourceTable]] instance
    +    *
    +    * @param externalCatalogTable the [[ExternalCatalogTable]] instance which to convert
    +    * @return converted [[TableSourceTable]] instance from the input catalog table
    +    */
    +  def fromExternalCatalogTable[T1, T2](
    +      tableEnv: TableEnvironment,
    +      externalCatalogTable: ExternalCatalogTable)
    +    : TableSourceSinkTable[T1, T2] = {
    +
    +    val properties = new DescriptorProperties()
    +    externalCatalogTable.addProperties(properties)
    +    val javaMap = properties.asMap
    +    val statistics = new FlinkStatistic(externalCatalogTable.getTableStats)
    +
    +    val source: Option[TableSourceTable[T1]] = tableEnv match {
    +      // check for a batch table source in this batch environment
    +      case _: BatchTableEnvironment if externalCatalogTable.isBatchTable =>
    +        createBatchTableSource(externalCatalogTable, javaMap, statistics)
    +
    +      // check for a stream table source in this stream environment
    +      case _: StreamTableEnvironment if externalCatalogTable.isStreamTable =>
    +        createStreamTableSource(externalCatalogTable, javaMap, statistics)
    +
    +      case _ =>
    +        throw new ValidationException(
    +          "External catalog table does not support the current environment for a table source.")
    +    }
    +
    +    val sink: Option[TableSinkTable[T2]] = tableEnv match {
    +      // check for a batch table sink in this batch environment
    +      case _: BatchTableEnvironment if externalCatalogTable.isBatchTable =>
    +        createBatchTableSink(externalCatalogTable, javaMap, statistics)
    +
    +      // check for a stream table sink in this stream environment
    +      case _: StreamTableEnvironment if externalCatalogTable.isStreamTable =>
    +        createStreamTableSink(externalCatalogTable, javaMap, statistics)
    +
    +      case _ =>
    +        throw new ValidationException(
    +          "External catalog table does not support the current environment for a table sink.")
    +    }
    +
    +    new TableSourceSinkTable[T1, T2](source, sink)
    +  }
    +
    +  private def createBatchTableSource[T](
    +      externalCatalogTable: ExternalCatalogTable,
    +      javaMap: util.Map[String, String],
    +      statistics: FlinkStatistic)
    +    : Option[TableSourceTable[T]] = if (externalCatalogTable.isTableSource) {
    --- End diff --
    
    Reverse the if/else branches - the simpler case should come first.
    
    Also, if you change it to
    
    ```
    if (!externalCatalogTable.isTableSource) {
      return None
    }
    
    val source = TableFactoryService
      .find(classOf[BatchTableSourceFactory[T]], javaMap)
      .createBatchTableSource(javaMap)
    val table = new BatchTableSourceTable(
      source,
      statistics)
    Some(table)
    ```
    
    it would simplify the code even further (the reader wouldn't have to track one extra level of nesting).
    
    Ditto in other places.

---
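
For context, here is a minimal sketch of how the early-return shape suggested above could look when applied to the whole `createBatchTableSource` method. It reuses the names from the diff and the reviewer's snippet (`TableFactoryService`, `BatchTableSourceFactory`, `BatchTableSourceTable`); the exact signatures are assumed from those excerpts, not taken from the merged Flink code.

```scala
// Sketch only: assumes the signatures shown in the diff and review snippet above.
private def createBatchTableSource[T](
    externalCatalogTable: ExternalCatalogTable,
    javaMap: util.Map[String, String],
    statistics: FlinkStatistic)
  : Option[TableSourceTable[T]] = {

  // Simpler case first: the table does not declare a source, so there is nothing to build.
  if (!externalCatalogTable.isTableSource) {
    return None
  }

  // Discover a matching batch source factory from the table properties and wrap
  // the created source together with its statistics for the planner.
  val source = TableFactoryService
    .find(classOf[BatchTableSourceFactory[T]], javaMap)
    .createBatchTableSource(javaMap)
  Some(new BatchTableSourceTable(source, statistics))
}
```

The same early-return shape would apply to the sink and stream variants the comment refers to ("ditto in other places").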