[ https://issues.apache.org/jira/browse/PHOENIX-4133?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
ZhuQQ updated PHOENIX-4133:
---------------------------
Description:
In some cases, we create Hive tables with a different column order that may not contain all of the columns
in the Phoenix table; we then found that `INSERT INTO test SELECT ...` does not work correctly.
For example:
{code:sql}
-- In Phoenix:
CREATE TABLE IF NOT EXISTS test (
key1 VARCHAR NOT NULL,
key2 INTEGER NOT NULL,
key3 VARCHAR,
pv BIGINT,
uv BIGINT,
CONSTRAINT PK PRIMARY KEY (key1, key2, key3)
);
{code}
{code:sql}
-- In Hive:
CREATE EXTERNAL TABLE test.test_part (
key1 string,
key2 int,
pv bigint
)
STORED BY 'org.apache.phoenix.hive.PhoenixStorageHandler'
TBLPROPERTIES (
"phoenix.table.name" = "test",
"phoenix.zookeeper.quorum" = "localhost",
"phoenix.zookeeper.znode.parent" = "/hbase",
"phoenix.zookeeper.client.port" = "2181",
"phoenix.rowkeys" = "key1,key2",
"phoenix.column.mapping" = "key1:key1,key2:key2,pv:pv"
);
CREATE EXTERNAL TABLE test.test_uv (
key1 string,
key2 int,
key3 string,
app_version string,
channel string,
uv bigint
)
STORED BY 'org.apache.phoenix.hive.PhoenixStorageHandler'
TBLPROPERTIES (
"phoenix.table.name" = "test",
"phoenix.zookeeper.quorum" = "localhost",
"phoenix.zookeeper.znode.parent" = "/hbase",
"phoenix.zookeeper.client.port" = "2181",
"phoenix.rowkeys" = "key1,key2,key3",
"phoenix.column.mapping" = "key1:key1,key2:key2,key3:key3,uv:uv"
);
{code}
Then insert to {{test.test_part}}:
{code:sql}
INSERT INTO test.test_part SELECT 'some key', 20170828,80;
{code}
throws error:
{code:java}
ERROR 203 (22005): Type mismatch. BIGINT cannot be coerced to VARCHAR
{code}
And insert to {{test.test_uv}}:
{code:sql}
INSERT INTO test.test_uv SELECT 'some key',20170828,'linux',11;
{code}
Job executed successfully, but pv is overridden to 11 and uv is still NULL.
PS: haven't tested other versions, but judging by the latest source code, newer versions may
also have the same problems
was:
In some cases, we create Hive tables with a different column order that may not contain all of the columns
in the Phoenix table; we then found that `INSERT INTO test SELECT ...` does not work correctly.
For example:
{code:sql}
-- In Phoenix:
CREATE TABLE IF NOT EXISTS test (
key1 VARCHAR NOT NULL,
key2 INTEGER NOT NULL,
key3 VARCHAR,
pv BIGINT,
uv BIGINT,
CONSTRAINT PK PRIMARY KEY (key1, key2, key3)
);
{code}
{code:sql}
-- In Hive:
CREATE EXTERNAL TABLE test.test_part (
key1 string,
key2 int,
pv bigint
)
STORED BY 'org.apache.phoenix.hive.PhoenixStorageHandler'
TBLPROPERTIES (
"phoenix.table.name" = "test",
"phoenix.zookeeper.quorum" = "localhost",
"phoenix.zookeeper.znode.parent" = "/hbase",
"phoenix.zookeeper.client.port" = "2181",
"phoenix.rowkeys" = "key1,key2",
"phoenix.column.mapping" = "key1:key1,key2:key2,pv:pv"
);
CREATE EXTERNAL TABLE test.test_uv (
key1 string,
key2 int,
key3 string,
app_version string,
channel string,
uv bigint
)
STORED BY 'org.apache.phoenix.hive.PhoenixStorageHandler'
TBLPROPERTIES (
"phoenix.table.name" = "test",
"phoenix.zookeeper.quorum" = "localhost",
"phoenix.zookeeper.znode.parent" = "/hbase",
"phoenix.zookeeper.client.port" = "2181",
"phoenix.rowkeys" = "key1,key2,key3",
"phoenix.column.mapping" = "key1:key1,key2:key2,key3:key3,uv:uv"
);
{code}
Then insert to {{test.test_part}}:
{code:sql}
INSERT INTO test.test_part SELECT 'some key', 20170828,80;
{code}
throws error:
{code:java}
ERROR 203 (22005): Type mismatch. BIGINT cannot be coerced to VARCHAR
{code}
And insert to {{test.test_uv}}:
{code:sql}
INSERT INTO test.test_uv SELECT 'some key',20170828,'linux',11;
{code}
Job executed successfully, but pv is overridden to 11 and uv is still NULL.
PS: haven't tested other versions, but judging by the latest source code, newer versions may also
have the same problems
> [hive] ColumnInfo list should be reordered and filtered refer the hive tables
> -----------------------------------------------------------------------------
>
> Key: PHOENIX-4133
> URL: https://issues.apache.org/jira/browse/PHOENIX-4133
> Project: Phoenix
> Issue Type: Bug
> Affects Versions: 4.9.0
> Reporter: ZhuQQ
>
> In some cases, we create Hive tables with a different column order that may not contain all of the columns
in the Phoenix table; we then found that `INSERT INTO test SELECT ...` does not work correctly.
> For example:
> {code:sql}
> -- In Phoenix:
> CREATE TABLE IF NOT EXISTS test (
> key1 VARCHAR NOT NULL,
> key2 INTEGER NOT NULL,
> key3 VARCHAR,
> pv BIGINT,
> uv BIGINT,
> CONSTRAINT PK PRIMARY KEY (key1, key2, key3)
> );
> {code}
> {code:sql}
> -- In Hive:
> CREATE EXTERNAL TABLE test.test_part (
> key1 string,
> key2 int,
> pv bigint
> )
> STORED BY 'org.apache.phoenix.hive.PhoenixStorageHandler'
> TBLPROPERTIES (
> "phoenix.table.name" = "test",
> "phoenix.zookeeper.quorum" = "localhost",
> "phoenix.zookeeper.znode.parent" = "/hbase",
> "phoenix.zookeeper.client.port" = "2181",
> "phoenix.rowkeys" = "key1,key2",
> "phoenix.column.mapping" = "key1:key1,key2:key2,pv:pv"
> );
> CREATE EXTERNAL TABLE test.test_uv (
> key1 string,
> key2 int,
> key3 string,
> app_version string,
> channel string,
> uv bigint
> )
> STORED BY 'org.apache.phoenix.hive.PhoenixStorageHandler'
> TBLPROPERTIES (
> "phoenix.table.name" = "test",
> "phoenix.zookeeper.quorum" = "localhost",
> "phoenix.zookeeper.znode.parent" = "/hbase",
> "phoenix.zookeeper.client.port" = "2181",
> "phoenix.rowkeys" = "key1,key2,key3",
> "phoenix.column.mapping" = "key1:key1,key2:key2,key3:key3,uv:uv"
> );
> {code}
> Then insert to {{test.test_part}}:
> {code:sql}
> INSERT INTO test.test_part SELECT 'some key', 20170828,80;
> {code}
> throws error:
> {code:java}
> ERROR 203 (22005): Type mismatch. BIGINT cannot be coerced to VARCHAR
> {code}
> And insert to {{test.test_uv}}:
> {code:sql}
> INSERT INTO test.test_uv SELECT 'some key',20170828,'linux',11;
> {code}
> Job executed successfully, but pv is overridden to 11 and uv is still NULL.
> PS: haven't tested other versions, but judging by the latest source code, newer versions
may also have the same problems
--
This message was sent by Atlassian JIRA
(v6.4.14#64029)
|