kylin-user mailing list archives

From "jianhui.yi" <jianhui...@zhiyoubao.com>
Subject table_snapshot file does not exist
Date Thu, 18 May 2017 06:25:49 GMT
Hi all,

When I build the cube and it reaches step 4, "Build Dimension Dictionary", the build fails with the error below. How can I solve it?

The error appears whenever a cube uses dimensions from this table.

 

java.io.FileNotFoundException: File does not exist: /kylin/kylin_metadata/resources/table_snapshot/DW.DIM_PRODUCT/1394db19-c200-46f8-833c-d28878629246.snapshot
    at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:66)
    at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2007)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1977)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1890)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:572)
    at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:89)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:365)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2141)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2137)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1783)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2135)

    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
    at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1281)
    at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1266)
    at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1254)
    at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:305)
    at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:271)
    at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:263)
    at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1585)
    at org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:309)
    at org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:305)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:305)
    at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:779)
    at org.apache.kylin.storage.hbase.HBaseResourceStore.getInputStream(HBaseResourceStore.java:207)
    at org.apache.kylin.storage.hbase.HBaseResourceStore.getResourceImpl(HBaseResourceStore.java:227)
    at org.apache.kylin.common.persistence.ResourceStore.getResource(ResourceStore.java:148)
    at org.apache.kylin.dict.lookup.SnapshotManager.load(SnapshotManager.java:217)
    at org.apache.kylin.dict.lookup.SnapshotManager.checkDupByInfo(SnapshotManager.java:182)
    at org.apache.kylin.dict.lookup.SnapshotManager.buildSnapshot(SnapshotManager.java:128)
    at org.apache.kylin.cube.CubeManager.buildSnapshotTable(CubeManager.java:285)
    at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:92)
    at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:54)
    at org.apache.kylin.engine.mr.steps.CreateDictionaryJob.run(CreateDictionaryJob.java:66)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:84)
    at org.apache.kylin.engine.mr.common.HadoopShellExecutable.doWork(HadoopShellExecutable.java:63)
    at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:124)
    at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:64)
    at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:124)
    at org.apache.kylin.job.impl.threadpool.DefaultScheduler$JobRunner.run(DefaultScheduler.java:142)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): File does not exist: /kylin/kylin_metadata/resources/table_snapshot/DW.DIM_PRODUCT/1394db19-c200-46f8-833c-d28878629246.snapshot
    at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:66)
    at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2007)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1977)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1890)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:572)
    at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:89)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:365)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2141)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2137)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1783)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2135)

    at org.apache.hadoop.ipc.Client.call(Client.java:1472)
    at org.apache.hadoop.ipc.Client.call(Client.java:1409)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
    at com.sun.proxy.$Proxy30.getBlockLocations(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:256)
    at sun.reflect.GeneratedMethodAccessor174.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:256)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
    at com.sun.proxy.$Proxy31.getBlockLocations(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1279)
    ... 31 more

 

result code:2
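
In case it helps with diagnosis, the missing resource can be checked directly on HDFS; the path below is copied from the error above, so adjust it if your HDFS working directory is different. The metastore.sh cleanup command is only an assumption based on the standard Kylin distribution (it lists metadata resources that are no longer referenced; adding --delete true would remove them), so please verify it against your Kylin version before running it.

    # Confirm whether the snapshot file really is gone on HDFS (path taken from the error above)
    hdfs dfs -ls /kylin/kylin_metadata/resources/table_snapshot/DW.DIM_PRODUCT/

    # Assumed helper from the standard Kylin install: list stale metadata entries (dry run, no deletion)
    $KYLIN_HOME/bin/metastore.sh clean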

 

 

