drill-dev mailing list archives

From Dave Oshinsky <doshin...@commvault.com>
Subject RE: question on failed unit test (in protobuf)
Date Fri, 12 Aug 2016 17:08:35 GMT
Jinfeng,
Actually, it turned out that the error message was because some of the hooks in the code to set up the casting mechanism were missing for VARDECIMAL.  Once I added these (in a number of places), I'm now encountering a weird problem, shown below.  The generated code is using the copyFrom method, which is protected, instead of copyFromSafe, which is public.  Which piece of code is input to this generated CopierGen1 code?
Dave Oshinsky

The error:
0: jdbc:drill:zk=local> select employee_id, salary, commission_pct from dfs.`c:/apache/drill-master/drill/exec/java-exec/src/test/resources/parquet/varlenDecimal.parquet` where employee_id=100;
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
Error: SYSTEM ERROR: CompileException: File 'C:\tmp\code\janino1981697945618872535.java', Line 76, Column 27: No applicable constructor/method found for actual parameters "int, int, org.apache.drill.exec.vector.VarDecimalVector"; candidates are: "protected void org.apache.drill.exec.vector.VarDecimalVector.copyFrom(int, int, org.apache.drill.exec.vector.VarDecimalVector)"

Fragment 0:0

[Error Id: aca1f748-4494-45d4-b13a-eee24ff2c9a6 on DaveOshinsky-PC.gp.cv.commvault.com:31010] (state=,code=0)
0: jdbc:drill:zk=local>

The code that failed to compile:
$ cat c:/tmp/code/*2535.java

package org.apache.drill.exec.test.generated;

import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.record.RecordBatch;
import org.apache.drill.exec.vector.NullableVarDecimalVector;
import org.apache.drill.exec.vector.VarDecimalVector;

public class CopierGen1 {

    VarDecimalVector vv0;
    VarDecimalVector vv3;
    NullableVarDecimalVector vv6;
    NullableVarDecimalVector vv9;
    NullableVarDecimalVector vv12;
    NullableVarDecimalVector vv15;

    public void doSetup(FragmentContext context, RecordBatch incoming, RecordBatch outgoing)
        throws SchemaChangeException
    {
        {
            int[] fieldIds1 = new int[ 1 ] ;
            fieldIds1 [ 0 ] = 0;
            Object tmp2 = (incoming).getValueAccessorById(VarDecimalVector.class, fieldIds1).getValueVector();
            if (tmp2 == null) {
                throw new SchemaChangeException("Failure while loading vector vv0 with id:
TypedFieldId [fieldIds=[0], remainder=null].");
            }
            vv0 = ((VarDecimalVector) tmp2);
            int[] fieldIds4 = new int[ 1 ] ;
            fieldIds4 [ 0 ] = 0;
            Object tmp5 = (outgoing).getValueAccessorById(VarDecimalVector.class, fieldIds4).getValueVector();
            if (tmp5 == null) {
                throw new SchemaChangeException("Failure while loading vector vv3 with id:
TypedFieldId [fieldIds=[0], remainder=null].");
            }
            vv3 = ((VarDecimalVector) tmp5);
        }
        {
            int[] fieldIds7 = new int[ 1 ] ;
            fieldIds7 [ 0 ] = 1;
            Object tmp8 = (incoming).getValueAccessorById(NullableVarDecimalVector.class, fieldIds7).getValueVector();
            if (tmp8 == null) {
                throw new SchemaChangeException("Failure while loading vector vv6 with id:
TypedFieldId [fieldIds=[1], remainder=null].");
            }
            vv6 = ((NullableVarDecimalVector) tmp8);
            int[] fieldIds10 = new int[ 1 ] ;
            fieldIds10 [ 0 ] = 1;
            Object tmp11 = (outgoing).getValueAccessorById(NullableVarDecimalVector.class, fieldIds10).getValueVector();
            if (tmp11 == null) {
                throw new SchemaChangeException("Failure while loading vector vv9 with id:
TypedFieldId [fieldIds=[1], remainder=null].");
            }
            vv9 = ((NullableVarDecimalVector) tmp11);
        }
        {
            int[] fieldIds13 = new int[ 1 ] ;
            fieldIds13 [ 0 ] = 2;
            Object tmp14 = (incoming).getValueAccessorById(NullableVarDecimalVector.class, fieldIds13).getValueVector();
            if (tmp14 == null) {
                throw new SchemaChangeException("Failure while loading vector vv12 with id:
TypedFieldId [fieldIds=[2], remainder=null].");
            }
            vv12 = ((NullableVarDecimalVector) tmp14);
            int[] fieldIds16 = new int[ 1 ] ;
            fieldIds16 [ 0 ] = 2;
            Object tmp17 = (outgoing).getValueAccessorById(NullableVarDecimalVector.class, fieldIds16).getValueVector();
            if (tmp17 == null) {
                throw new SchemaChangeException("Failure while loading vector vv15 with id:
TypedFieldId [fieldIds=[2], remainder=null].");
            }
            vv15 = ((NullableVarDecimalVector) tmp17);
        }
    }

    public void doEval(int inIndex, int outIndex)
        throws SchemaChangeException
    {
        {
            vv3 .copyFrom((inIndex), (outIndex), vv0);                   // JINFENG - this should actually be copyFromSafe, not copyFrom
        }
        {
            vv9 .copyFrom((inIndex), (outIndex), vv6);
        }
        {
            vv15 .copyFrom((inIndex), (outIndex), vv12);
        }
    }

    public void __DRILL_INIT__()
        throws SchemaChangeException
    {
    }

}

doshinsky@DaveOshinsky-PC /cygdrive/c/apache/drill-1.7.0/exec/vector
$



Here's a code snippet from VarDecimalVector.java:

protected void copyFrom(int fromIndex, int thisIndex, VarDecimalVector from) {
    // Copies a single value, assuming this vector's buffers are already large enough.
    final UInt4Vector.Accessor fromOffsetVectorAccessor = from.offsetVector.getAccessor();
    final int start = fromOffsetVectorAccessor.get(fromIndex);
    final int end = fromOffsetVectorAccessor.get(fromIndex + 1);
    final int len = end - start;

    final int outputStart = offsetVector.data.getInt(thisIndex * 4);
    from.data.getBytes(start, data, outputStart, len);
    offsetVector.data.setInt((thisIndex + 1) * 4, outputStart + len);
}

public boolean copyFromSafe(int fromIndex, int thisIndex, VarDecimalVector from) {
    // Same copy, but reallocates the data buffer when needed and writes the
    // offset through the mutator's setSafe().
    final UInt4Vector.Accessor fromOffsetVectorAccessor = from.offsetVector.getAccessor();
    final int start = fromOffsetVectorAccessor.get(fromIndex);
    final int end = fromOffsetVectorAccessor.get(fromIndex + 1);
    final int len = end - start;
    final int outputStart = offsetVector.data.getInt(thisIndex * 4);

    while (data.capacity() < outputStart + len) {
        reAlloc();
    }

    offsetVector.getMutator().setSafe(thisIndex + 1, outputStart + len);
    from.data.getBytes(start, data, outputStart, len);
    return true;
}
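
For reference, here is roughly what I'd expect doEval in the generated CopierGen1 to look like once it calls copyFromSafe instead of copyFrom (just a sketch; I'm ignoring copyFromSafe's boolean return value):

    public void doEval(int inIndex, int outIndex)
        throws SchemaChangeException
    {
        // copyFromSafe is public and reallocates the destination buffers as
        // needed, so the generated copier can call it directly.
        vv3.copyFromSafe((inIndex), (outIndex), vv0);
        vv9.copyFromSafe((inIndex), (outIndex), vv6);
        vv15.copyFromSafe((inIndex), (outIndex), vv12);
    }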

-----Original Message-----
From: Jinfeng Ni [mailto:jinfengni99@gmail.com] 
Sent: Thursday, August 11, 2016 7:28 PM
To: dev
Subject: Re: question on failed unit test (in protobuf)

For the IDEA debugger, I just tried with one sample query, and it did stop in MaterializeVisitor.  One thing you may check is to make sure the expression passed to this visitor actually contains something that matches the breakpoint (if you set a breakpoint in visitFunctionCall(), then the expression should have at least one function).

For the error you posted, it means you do not have a function implementation for [equal(VARDECIMAL-REQUIRED, INT-REQUIRED)].  I assume in your case employee_id is of VARDECIMAL type.  If your code does not insert an implicit cast (from INT to VARDECIMAL), then you have to provide an "equal" function (VARDECIMAL, INT).
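
If you go the function route, it would be a regular Drill simple function.  Below is a rough sketch only: VarDecimalHolder, its fields (buffer, start, end, scale), and the class name are my guesses at what your new type will look like, and the decoding assumes the unscaled value is stored as big-endian bytes plus a scale, so adjust it to whatever encoding you actually use.

package org.apache.drill.exec.expr.fn.impl;

import org.apache.drill.exec.expr.DrillSimpleFunc;
import org.apache.drill.exec.expr.annotations.FunctionTemplate;
import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
import org.apache.drill.exec.expr.annotations.Output;
import org.apache.drill.exec.expr.annotations.Param;
import org.apache.drill.exec.expr.holders.BitHolder;
import org.apache.drill.exec.expr.holders.IntHolder;
import org.apache.drill.exec.expr.holders.VarDecimalHolder;  // hypothetical holder for the new type

@FunctionTemplate(name = "equal",
    scope = FunctionTemplate.FunctionScope.SIMPLE,
    nulls = NullHandling.NULL_IF_NULL)
public class VarDecimalIntEqual implements DrillSimpleFunc {

  @Param VarDecimalHolder left;   // assumed fields: buffer, start, end, scale
  @Param IntHolder right;
  @Output BitHolder out;

  public void setup() { }

  public void eval() {
    // Pull the unscaled bytes out of the value's buffer slice.
    int len = left.end - left.start;
    byte[] bytes = new byte[len];
    left.buffer.getBytes(left.start, bytes, 0, len);

    java.math.BigDecimal l = (len == 0)
        ? java.math.BigDecimal.ZERO
        : new java.math.BigDecimal(new java.math.BigInteger(bytes), left.scale);
    java.math.BigDecimal r = java.math.BigDecimal.valueOf(right.value);

    // compareTo() treats 1.0 and 1.00 as equal, unlike equals().
    out.value = (l.compareTo(r) == 0) ? 1 : 0;
  }
}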


On Thu, Aug 11, 2016 at 3:16 PM, Dave Oshinsky <doshinsky@commvault.com> wrote:
> Jinfeng,
> My build node has no readme in the folder with *.proto (protocol/src/main/protobuf).  However, there's a readme.txt 3 levels up in "protocol", which I did not notice because I ran protoc manually (using the Cygwin shell) in protocol/src/main/protobuf.  The IDEA debugger did help me to see the nature of the exception (which was neither caught nor logged), which pointed to protobuf.
>
> The biggest problem I've been having with the IDEA debugger is that some breakpoints don't fire, like any breakpoint I set in this class within ExpressionTreeMaterializer.java:
> private static class MaterializeVisitor extends
> AbstractExprVisitor<LogicalExpression, FunctionLookupContext,
> RuntimeException>
>
> If I set a breakpoint in a function elsewhere that is called from within that private static class, the debugger won't even let me look at the source code for the stack frame in that private static class.  All of this might be an IDEA bug, or perhaps there's some build setting to modify (doubtful).
>
> Everything involving the new VarDecimal type is now running much, much better.  I've changed quite a few of the Cast-related functions to get it to compile, but it's still failing to run the query below.  What am I missing?
>
> 0: jdbc:drill:zk=local> select employee_id, salary, commission_pct from dfs.`c:/apache/drill-master/drill/exec/java-exec/src/test/resources/parquet/varlenDecimal.parquet` where employee_id=100;
> SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
> SLF4J: Defaulting to no-operation (NOP) logger implementation
> SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
> Error: SYSTEM ERROR: SchemaChangeException: Failure while trying to materialize incoming schema.  Errors:
>
> Error in expression at index -1.  Error: Missing function implementation: [equal (VARDECIMAL-REQUIRED, INT-REQUIRED)].  Full expression: --UNKNOWN EXPRESSION--..
>
>
> Fragment 0:0
>
> [Error Id: 817627e1-aa4a-4791-8dc3-2f94a73bec9f on DaveOshinsky-PC.gp.cv.commvault.com:31010] (state=,code=0)
> 0: jdbc:drill:zk=local>
>
> Dave Oshinsky
>
> -----Original Message-----
> From: Jinfeng Ni [mailto:jinfengni99@gmail.com]
> Sent: Thursday, August 11, 2016 2:05 PM
> To: dev
> Subject: Re: question on failed unit test (in protobuf)
>
> The readme.txt is right in the protocol directory, where the protobuf definition files are located.  Actually, it was the first time I found this readme file yesterday, thanks to IDEA.  When I modified Types.proto (which I had not done before), IDEA listed this readme file not far away from Types.proto.  That's how I ended up reading this file.
>
> Speaking about debugging Drill, the IDEA debugger normally works fine for me.  However, if you modify a freemarker template (which Drill uses to statically generate a set of Java source code at Maven build time), then you need to run "mvn install -DskipTests" or "mvn generate-sources" to make sure the template change regenerates the source code, before you start debugging in IDEA.
>
> On Thu, Aug 11, 2016 at 7:50 AM, Dave Oshinsky <doshinsky@commvault.com> wrote:
>> Jinfeng,
>> Not initially, but I figured out how to correct the inconsistent protocols yesterday (I manually ran protoc on *.proto, after seeing other *.proto files include Types.proto, and an uncaught and unlogged class-load exception under the debugger after Drill just hung running a SQL select).  I'm now unit testing the new VarDecimal logic, with all the code I'm expecting to run actually running.
>>
>> For me, this was not an obvious place to look for those instructions.  I found I needed to modify the protocol's Types.proto by looking through the source code to the location where all types are initially defined.  It might help other newbies to put the instructions right there in the same folder, protocol/src/main/protobuf, with the *.proto files.
>>
>> Are there any other hugely useful readmes I should be reading?  Like one on how to successfully use a debugger on Drill (which is still sometimes flaky with the latest IntelliJ IDEA)?
>> Dave Oshinsky
>>
>>
>> -----Original Message-----
>> From: Jinfeng Ni [mailto:jinfengni99@gmail.com]
>> Sent: Wednesday, August 10, 2016 8:59 PM
>> To: dev
>> Subject: Re: question on failed unit test (in protobuf)
>>
>> Did you follow the steps specified in this readme.txt [1]?
>>
>> [1] https://github.com/apache/drill/blob/master/protocol/readme.txt
>>
>>
>> On Tue, Aug 9, 2016 at 3:59 PM, Dave Oshinsky <doshinsky@commvault.com> wrote:
>>> I have a Drill 1.7 node (Windows 7, Java 8) in which I've made numerous experimental changes to try adding a new kind of vector, VarDecimal.  I found that I needed to modify Types.proto to add "VARDECIMAL = 43;", and I ran protoc.exe to generate a new TypeProtos.java.  Do I need to change something else to avoid this strange protobuf-related failure?
>>> Thanks,
>>> Dave Oshinsky
>>>
>>> Running org.apache.drill.exec.DrillSeparatePlanningTest
>>> SLF4J: Failed toString() invocation on an object of type [org.apache.drill.exec.proto.UserProtos$RunQuery]
>>> java.lang.ExceptionInInitializerError
>>>         at org.apache.drill.exec.proto.UserProtos.<clinit>(UserProtos.java:7304)
>>>         at org.apache.drill.exec.proto.UserProtos$RunQuery.internalGetFieldAccessorTable(UserProtos.java:3704)
>>>         at com.google.protobuf.GeneratedMessage.getAllFieldsMutable(GeneratedMessage.java:105)
>>>         at com.google.protobuf.GeneratedMessage.getAllFields(GeneratedMessage.java:153)
>>>         at com.google.protobuf.TextFormat$Printer.print(TextFormat.java:272)
>>>         at com.google.protobuf.TextFormat$Printer.access$400(TextFormat.java:248)
>>>         at com.google.protobuf.TextFormat.print(TextFormat.java:71)
>>>         at com.google.protobuf.TextFormat.printToString(TextFormat.java:118)
>>>         at com.google.protobuf.AbstractMessage.toString(AbstractMessage.java:106)
>>>         at org.slf4j.helpers.MessageFormatter.safeObjectAppend(MessageFormatter.java:305)
>>>         at org.slf4j.helpers.MessageFormatter.deeplyAppendParameter(MessageFormatter.java:277)
>>>         at org.slf4j.helpers.MessageFormatter.arrayFormat(MessageFormatter.java:231)
>>>         at ch.qos.logback.classic.spi.LoggingEvent.<init>(LoggingEvent.java:115)
>>>         at ch.qos.logback.classic.Logger.buildLoggingEventAndAppend(Logger.java:439)
>>>         at ch.qos.logback.classic.Logger.filterAndLog_1(Logger.java:413)
>>>         at ch.qos.logback.classic.Logger.debug(Logger.java:506)
>>>         at org.apache.drill.exec.client.DrillClient$ListHoldingResultsListener.<init>(DrillClient.java:428)
>>>         at org.apache.drill.exec.client.DrillClient.runQuery(DrillClient.java:327)
>>>         at org.apache.drill.QueryTestUtil.createClient(QueryTestUtil.java:69)
>>>         at org.apache.drill.BaseTestQuery.openClient(BaseTestQuery.java:196)
>>>         at org.apache.drill.BaseTestQuery.setupDefaultTestCluster(BaseTestQuery.java:122)
>>>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>>>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>>>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>>>         at java.lang.reflect.Method.invoke(Method.java:498)
>>>         at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
>>>         at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>>>         at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
>>>         at mockit.integration.junit4.internal.JUnit4TestRunnerDecorator.invokeExplosively(JUnit4TestRunnerDecorator.java:44)
>>>         at mockit.integration.junit4.internal.MockFrameworkMethod.invokeExplosively(MockFrameworkMethod.java:29)
>>>         at sun.reflect.GeneratedMethodAccessor15.invoke(Unknown Source)
>>>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>>>         at java.lang.reflect.Method.invoke(Method.java:498)
>>>         at mockit.internal.util.MethodReflection.invokeWithCheckedThrows(MethodReflection.java:95)
>>>         at mockit.internal.annotations.MockMethodBridge.callMock(MockMethodBridge.java:76)
>>>         at mockit.internal.annotations.MockMethodBridge.invoke(MockMethodBridge.java:41)
>>>         at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java)
>>>         at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:24)
>>>         at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
>>>         at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
>>>         at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
>>>         at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
>>>         at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
>>>         at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
>>>         at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
>>>         at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)
>>> Caused by: java.lang.IllegalArgumentException: Invalid embedded descriptor for "SchemaDef.proto".
>>>         at com.google.protobuf.Descriptors$FileDescriptor.internalBuildGeneratedFileFrom(Descriptors.java:301)
>>>         at org.apache.drill.exec.proto.SchemaDefProtos.<clinit>(SchemaDefProtos.java:142)
>>>         ... 46 more
>>> Caused by: com.google.protobuf.Descriptors$DescriptorValidationException: SchemaDef.proto: Dependencies passed to FileDescriptor.buildFrom() don't match those listed in the FileDescriptorProto.
>>>         at com.google.protobuf.Descriptors$FileDescriptor.buildFrom(Descriptors.java:246)
>>>         at com.google.protobuf.Descriptors$FileDescriptor.internalBuildGeneratedFileFrom(Descriptors.java:299)
>>>         ... 47 more