hive-dev mailing list archives

From "Seoeun Park (JIRA)" <j...@apache.org>
Subject [jira] [Updated] (HIVE-2190) WritableDoubleObjectInspector throws ClassCastException when it is used in GenericUDF.
Date Wed, 01 Jun 2011 06:03:47 GMT

     [ https://issues.apache.org/jira/browse/HIVE-2190?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Seoeun Park updated HIVE-2190:
------------------------------

    Description: 
WritableDoubleObjectInspector throws a ClassCastException when I use it in a GenericUDF.
I think this is because the DoubleWritable type that WritableDoubleObjectInspector expects is org.apache.hadoop.hive.serde2.io.DoubleWritable rather than org.apache.hadoop.io.DoubleWritable.

WritableDoubleObjectInspector, with its org.apache.hadoop.hive.serde2.io.DoubleWritable, works fine in a GenericUDAF but not in a GenericUDF.
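
To make the clash concrete, here is a minimal, self-contained sketch (my own illustration, not code from the Hive source tree) showing that the two DoubleWritable classes are unrelated types, so the cast that WritableDoubleObjectInspector.get performs must fail:

{code}
// Illustration only: both DoubleWritable classes wrap a double the same way,
// but neither extends the other, so a cross-cast always throws.
public class CastClashDemo {
    public static void main(String[] args) {
        Object hadoopDouble = new org.apache.hadoop.io.DoubleWritable(1234.11);

        // This mirrors the cast inside WritableDoubleObjectInspector.get
        // (see the exception trace below): ClassCastException at runtime.
        org.apache.hadoop.hive.serde2.io.DoubleWritable hiveDouble =
                (org.apache.hadoop.hive.serde2.io.DoubleWritable) hadoopDouble;

        System.out.println(hiveDouble.get());
    }
}
{code}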

Here are the query and the sample code:

   select to_double("1234.11") from src;



{code}
// Imports reconstructed here for completeness; they were omitted from the
// original report.
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
// NOTE: this is the Hadoop DoubleWritable, not
// org.apache.hadoop.hive.serde2.io.DoubleWritable; the mismatch between the
// two is what triggers the ClassCastException below.
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;

public class GenericUDFToDouble extends GenericUDF {

    private ObjectInspectorConverters.Converter[] converters;
    private ObjectInspector[] argumentOIs;
    private ObjectInspector returnInspector;
    private final DoubleWritable doubleResult = new DoubleWritable();

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        // Convert each argument to a writable string so evaluate() can parse it.
        converters = new ObjectInspectorConverters.Converter[arguments.length];
        for (int i = 0; i < arguments.length; i++) {
            converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
                    PrimitiveObjectInspectorFactory.writableStringObjectInspector);
        }
        argumentOIs = arguments;

        // Declare a writable double as the return type.
        returnInspector = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        return returnInspector;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        if (arguments[0].get() == null) {
            return null;
        }
        try {
            Text value1 = (Text) converters[0].convert(arguments[0].get());

            // doubleResult is a Hadoop DoubleWritable, but the inspector
            // declared in initialize() casts to the Hive serde2 DoubleWritable
            // when the row is serialized, hence the exception.
            doubleResult.set(Double.parseDouble(value1.toString()));
            return doubleResult;
        } catch (Exception e) {
            // Parse failures are swallowed and surface as NULL.
            e.printStackTrace();
            return null;
        }
    }

    @Override
    public String getDisplayString(String[] children) {
        return "to_double";
    }
}
{code}

Exception trace
===============================
Caused by: java.lang.ClassCastException: org.apache.hadoop.io.DoubleWritable cannot be cast
to org.apache.hadoop.hive.serde2.io.DoubleWritable
	at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDoubleObjectInspector.get(WritableDoubleObjectInspector.java:37)
	at org.apache.hadoop.hive.serde2.lazy.LazyUtils.writePrimitiveUTF8(LazyUtils.java:200)
	at org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.serialize(LazySimpleSerDe.java:442)
	at org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.serializeField(LazySimpleSerDe.java:396)
	at org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.serialize(LazySimpleSerDe.java:383)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.processOp(FileSinkOperator.java:553)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:471)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:744)
	at org.apache.hadoop.hive.ql.exec.SelectOperator.processOp(SelectOperator.java:84)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:471)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:744)
	at org.apache.hadoop.hive.ql.exec.TableScanOperator.processOp(TableScanOperator.java:78)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:471)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:744)
	at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:527)
	... 5 more
=======================================
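
Assuming the inspector's expectation is the intended behavior (i.e., UDFs must hand back Hive's own writable), the workaround for the sample UDF above is a one-line import change, sketched here:

{code}
// Workaround sketch for GenericUDFToDouble above (not an official fix):
// import Hive's DoubleWritable instead of Hadoop's.
import org.apache.hadoop.hive.serde2.io.DoubleWritable;

// Nothing else changes: doubleResult is now exactly the class that
// WritableDoubleObjectInspector casts to at serialization time, so
//   select to_double("1234.11") from src;
// completes without the ClassCastException.
{code}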





  was:
Identical to the updated description above, except that the sample query previously read:

   select to_double(1234.11) from src;

(The sample code and exception trace were unchanged.)

> WritableDoubleObjectInspector throws ClassCastException when it is used in GenericUDF.
> --------------------------------------------------------------------------------------
>
>                 Key: HIVE-2190
>                 URL: https://issues.apache.org/jira/browse/HIVE-2190
>             Project: Hive
>          Issue Type: Bug
>          Components: Serializers/Deserializers
>    Affects Versions: 0.7.0
>         Environment: Ubuntu 10.10
>            Reporter: Seoeun Park
>            Priority: Minor
>             Fix For: 0.7.1
>
>

--
This message is automatically generated by JIRA.
For more information on JIRA, see: http://www.atlassian.com/software/jira
