kylin-user mailing list archives

From: Ahmed Mahran <ahmed.mah...@badrit.com>
Subject: Re: Timeout visiting cube!
Date: Tue, 12 Jul 2016 21:32:19 GMT
Confirmed.

The region server was stopping during the scan. Increasing the region server's
heap size fixed the problem.
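
For anyone hitting the same thing, this is roughly what the change looks like:
a minimal sketch of raising the region server heap via hbase-env.sh on a plain
(non-Cloudera-Manager) install. On a CDH cluster like this one, the equivalent
setting would normally be the RegionServer Java heap size in Cloudera Manager;
the 4 GB value below is only an example, not a recommendation.

    # hbase-env.sh on each region server node (example values)
    # Raise the region server heap so large coprocessor scans don't OOM the process
    export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xms4g -Xmx4g"

Region servers need a restart for the new heap to take effect. The symptom to
look for is the region server process dying around the time the scan starts
(typically with an OutOfMemoryError in its log), which then surfaces on the
Kylin side as the "Connection refused" and "Timeout visiting cube!" errors
quoted below.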

On Tue, Jul 12, 2016 at 9:16 PM, Li Yang <liyang@apache.org> wrote:

> > Caused by: java.net.ConnectException: Connection refused
>
> Seems the HBase config on the node was not in good shape.
>
> On Tue, Jul 12, 2016 at 11:40 PM, Ahmed Mahran <ahmed.mahran@badrit.com>
> wrote:
>
>> Hi,
>>
>> Kylin 1.5.2.1-cdh5.7
>> Hadoop 2.6.0-cdh5.7.1
>> Hive 1.1.0-cdh5.7.1
>> HBase 1.2.0-cdh5.7.1
>>
>> kylin.log
>>
>> 2016-07-12 08:10:03,894 ERROR [pool-5-thread-1] dao.ExecutableDao:145 :
>> error get all Jobs:
>> org.apache.hadoop.hbase.client.RetriesExhaustedException: Can't get the
>> location
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.getRegionLocations(RpcRetryingCallerWithReadReplicas.java:316)
>> at
>> org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:156)
>> at
>> org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:60)
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.call(ClientScanner.java:320)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:295)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:160)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:155)
>> at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:867)
>> at
>> org.apache.kylin.storage.hbase.HBaseResourceStore.visitFolder(HBaseResourceStore.java:137)
>> at
>> org.apache.kylin.storage.hbase.HBaseResourceStore.listResourcesImpl(HBaseResourceStore.java:107)
>> at
>> org.apache.kylin.common.persistence.ResourceStore.listResources(ResourceStore.java:123)
>> at
>> org.apache.kylin.job.dao.ExecutableDao.getJobIds(ExecutableDao.java:135)
>> at
>> org.apache.kylin.job.manager.ExecutableManager.getAllJobIds(ExecutableManager.java:204)
>> at
>> org.apache.kylin.job.impl.threadpool.DefaultScheduler$FetcherRunner.run(DefaultScheduler.java:81)
>> at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>> at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
>> at
>> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
>> at
>> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
>> at
>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>> at
>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>> at java.lang.Thread.run(Thread.java:745)
>> Caused by: java.net.ConnectException: Connection refused
>> at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
>> at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
>> at
>> org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
>> at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
>> at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupConnection(RpcClientImpl.java:416)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupIOstreams(RpcClientImpl.java:722)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.writeRequest(RpcClientImpl.java:906)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.tracedWriteRequest(RpcClientImpl.java:873)
>> at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1242)
>> at
>> org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:226)
>> at
>> org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:331)
>> at
>> org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.get(ClientProtos.java:34070)
>> at
>> org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRowOrBefore(ProtobufUtil.java:1582)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1398)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1199)
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.getRegionLocations(RpcRetryingCallerWithReadReplicas.java:305)
>> ... 21 more
>> 2016-07-12 08:10:03,900 ERROR [pool-5-thread-1]
>> manager.ExecutableManager:206 : error get All Job Ids
>> org.apache.kylin.job.exception.PersistentException:
>> org.apache.hadoop.hbase.client.RetriesExhaustedException: Can't get the
>> location
>> at
>> org.apache.kylin.job.dao.ExecutableDao.getJobIds(ExecutableDao.java:146)
>> at
>> org.apache.kylin.job.manager.ExecutableManager.getAllJobIds(ExecutableManager.java:204)
>> at
>> org.apache.kylin.job.impl.threadpool.DefaultScheduler$FetcherRunner.run(DefaultScheduler.java:81)
>> at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>> at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
>> at
>> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
>> at
>> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
>> at
>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>> at
>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>> at java.lang.Thread.run(Thread.java:745)
>> Caused by: org.apache.hadoop.hbase.client.RetriesExhaustedException:
>> Can't get the location
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.getRegionLocations(RpcRetryingCallerWithReadReplicas.java:316)
>> at
>> org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:156)
>> at
>> org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:60)
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.call(ClientScanner.java:320)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:295)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:160)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:155)
>> at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:867)
>> at
>> org.apache.kylin.storage.hbase.HBaseResourceStore.visitFolder(HBaseResourceStore.java:137)
>> at
>> org.apache.kylin.storage.hbase.HBaseResourceStore.listResourcesImpl(HBaseResourceStore.java:107)
>> at
>> org.apache.kylin.common.persistence.ResourceStore.listResources(ResourceStore.java:123)
>> at
>> org.apache.kylin.job.dao.ExecutableDao.getJobIds(ExecutableDao.java:135)
>> ... 9 more
>> Caused by: java.net.ConnectException: Connection refused
>> at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
>> at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
>> at
>> org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
>> at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
>> at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupConnection(RpcClientImpl.java:416)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupIOstreams(RpcClientImpl.java:722)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.writeRequest(RpcClientImpl.java:906)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.tracedWriteRequest(RpcClientImpl.java:873)
>> at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1242)
>> at
>> org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:226)
>> at
>> org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:331)
>> at
>> org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.get(ClientProtos.java:34070)
>> at
>> org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRowOrBefore(ProtobufUtil.java:1582)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1398)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1199)
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.getRegionLocations(RpcRetryingCallerWithReadReplicas.java:305)
>> ... 21 more
>> 2016-07-12 08:10:03,901 WARN  [pool-5-thread-1]
>> threadpool.DefaultScheduler:108 : Job Fetcher caught a exception
>> java.lang.RuntimeException:
>> org.apache.kylin.job.exception.PersistentException:
>> org.apache.hadoop.hbase.client.RetriesExhaustedException: Can't get the
>> location
>> 2016-07-12 08:10:11,955 INFO  [http-bio-7070-exec-7]
>> controller.QueryController:174 : Using project: CDR_Demo_Project
>> 2016-07-12 08:10:11,958 INFO  [http-bio-7070-exec-7]
>> controller.QueryController:175 : The original query:  select * from
>> CDR_AGGREGATION
>> 2016-07-12 08:10:11,960 INFO  [http-bio-7070-exec-7]
>> service.QueryService:266 : The corrected query: select * from
>> CDR_AGGREGATION
>> LIMIT 50000
>> 2016-07-12 08:10:12,053 INFO  [http-bio-7070-exec-7]
>> routing.QueryRouter:48 : The project manager's reference is
>> org.apache.kylin.metadata.project.ProjectManager@5759a069
>> 2016-07-12 08:10:12,054 INFO  [http-bio-7070-exec-7]
>> routing.QueryRouter:60 : Find candidates by table DEFAULT.CDR_AGGREGATION
>> and project=CDR_DEMO_PROJECT :
>> org.apache.kylin.query.routing.Candidate@44071cfe
>> 2016-07-12 08:10:12,055 INFO  [http-bio-7070-exec-7]
>> routing.QueryRouter:49 : Applying rule: class
>> org.apache.kylin.query.routing.rules.RemoveUncapableRealizationsRule,
>> realizations before: [CDR_Demo_Cube_1(CUBE)], realizations after:
>> [CDR_Demo_Cube_1(CUBE)]
>> 2016-07-12 08:10:12,055 INFO  [http-bio-7070-exec-7]
>> routing.QueryRouter:49 : Applying rule: class
>> org.apache.kylin.query.routing.rules.RealizationSortRule, realizations
>> before: [CDR_Demo_Cube_1(CUBE)], realizations after: [CDR_Demo_Cube_1(CUBE)]
>> 2016-07-12 08:10:12,056 INFO  [http-bio-7070-exec-7]
>> routing.QueryRouter:72 : The realizations remaining:
>> [CDR_Demo_Cube_1(CUBE)] And the final chosen one is the first one
>> 2016-07-12 08:10:12,077 DEBUG [http-bio-7070-exec-7]
>> enumerator.OLAPEnumerator:107 : query storage...
>> 2016-07-12 08:10:12,078 INFO  [http-bio-7070-exec-7]
>> enumerator.OLAPEnumerator:181 : No group by and aggregation found in this
>> query, will hack some result for better look of output...
>> 2016-07-12 08:10:12,078 INFO  [http-bio-7070-exec-7]
>> v2.CubeStorageQuery:239 : exactAggregation is true
>> 2016-07-12 08:10:12,079 INFO  [http-bio-7070-exec-7]
>> v2.CubeStorageQuery:357 : Enable limit 50000
>> 2016-07-12 08:10:12,087 DEBUG [http-bio-7070-exec-7]
>> v2.CubeHBaseEndpointRPC:257 : New scanner for current segment
>> CDR_Demo_Cube_1[20150101000000_20160101000000] will use
>> SCAN_FILTER_AGGR_CHECKMEM as endpoint's behavior
>> 2016-07-12 08:10:12,088 DEBUG [http-bio-7070-exec-7]
>> v2.CubeHBaseEndpointRPC:313 : Serialized scanRequestBytes 660 bytes,
>> rawScanBytesString 106 bytes
>> 2016-07-12 08:10:12,088 INFO  [http-bio-7070-exec-7]
>> v2.CubeHBaseEndpointRPC:315 : The scan 416215cb for segment
>> CDR_Demo_Cube_1[20150101000000_20160101000000] is as below with 1 separate
>> raw scans, shard part of start/end key is set to 0
>> 2016-07-12 08:10:12,090 INFO  [http-bio-7070-exec-7] v2.CubeHBaseRPC:271
>> : Visiting hbase table KYLIN_HX9PP90NMQ: cuboid exact match, from 15 to 15
>> Start:
>> \x00\x00\x00\x00\x00\x00\x00\x00\x00\x0F\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
>> (\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0F\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00)
>> Stop:
>>  \x00\x00\x00\x00\x00\x00\x00\x00\x00\x0F\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x00
>> (\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0F\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x00),
>> No Fuzzy Key
>> 2016-07-12 08:10:12,090 DEBUG [http-bio-7070-exec-7]
>> v2.CubeHBaseEndpointRPC:320 : Submitting rpc to 2 shards starting from
>> shard 0, scan range count 1
>> 2016-07-12 08:10:12,163 INFO  [http-bio-7070-exec-7]
>> v2.CubeHBaseEndpointRPC:103 : Timeout for ExpectedSizeIterator is: 66000
>> 2016-07-12 08:10:12,164 DEBUG [http-bio-7070-exec-7]
>> enumerator.OLAPEnumerator:127 : return TupleIterator...
>> 2016-07-12 08:10:45,430 ERROR [pool-11-thread-5]
>> util.LoggableCachedThreadPool:44 : Execution exception when running task in
>> pool-11-thread-5
>> 2016-07-12 08:10:45,432 ERROR [pool-11-thread-5]
>> util.LoggableCachedThreadPool:54 : Caught exception in thread
>> pool-11-thread-5:
>> java.lang.RuntimeException: <sub-thread for GTScanRequest 416215cb> Error
>> when visiting cubes by endpoint
>> at
>> org.apache.kylin.storage.hbase.cube.v2.CubeHBaseEndpointRPC$1.run(CubeHBaseEndpointRPC.java:345)
>> at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>> at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>> at
>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>> at
>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>> at java.lang.Thread.run(Thread.java:745)
>> Caused by: java.net.ConnectException: Connection refused
>> at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
>> at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
>> at
>> org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
>> at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
>> at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupConnection(RpcClientImpl.java:416)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupIOstreams(RpcClientImpl.java:722)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.writeRequest(RpcClientImpl.java:906)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.tracedWriteRequest(RpcClientImpl.java:873)
>> at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1242)
>> at
>> org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:226)
>> at
>> org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:331)
>> at
>> org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.get(ClientProtos.java:34070)
>> at
>> org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRowOrBefore(ProtobufUtil.java:1582)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1398)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1199)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1179)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1136)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getRegionLocation(ConnectionManager.java:971)
>> at
>> org.apache.hadoop.hbase.client.HRegionLocator.getRegionLocation(HRegionLocator.java:83)
>> at
>> org.apache.hadoop.hbase.client.HTable.getRegionLocation(HTable.java:569)
>> at
>> org.apache.hadoop.hbase.client.HTable.getKeysAndRegionsInRange(HTable.java:793)
>> at
>> org.apache.hadoop.hbase.client.HTable.getKeysAndRegionsInRange(HTable.java:763)
>> at
>> org.apache.hadoop.hbase.client.HTable.getStartKeysInRange(HTable.java:1830)
>> at
>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1785)
>> at
>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1765)
>> at
>> org.apache.kylin.storage.hbase.cube.v2.CubeHBaseEndpointRPC.getResults(CubeHBaseEndpointRPC.java:389)
>> at
>> org.apache.kylin.storage.hbase.cube.v2.CubeHBaseEndpointRPC.access$200(CubeHBaseEndpointRPC.java:75)
>> at
>> org.apache.kylin.storage.hbase.cube.v2.CubeHBaseEndpointRPC$1.run(CubeHBaseEndpointRPC.java:343)
>> ... 5 more
>> 2016-07-12 08:11:03,839 ERROR [pool-5-thread-1] dao.ExecutableDao:145 :
>> error get all Jobs:
>> org.apache.hadoop.hbase.client.RetriesExhaustedException: Can't get the
>> location
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.getRegionLocations(RpcRetryingCallerWithReadReplicas.java:316)
>> at
>> org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:156)
>> at
>> org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:60)
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.call(ClientScanner.java:320)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:295)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:160)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:155)
>> at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:867)
>> at
>> org.apache.kylin.storage.hbase.HBaseResourceStore.visitFolder(HBaseResourceStore.java:137)
>> at
>> org.apache.kylin.storage.hbase.HBaseResourceStore.listResourcesImpl(HBaseResourceStore.java:107)
>> at
>> org.apache.kylin.common.persistence.ResourceStore.listResources(ResourceStore.java:123)
>> at
>> org.apache.kylin.job.dao.ExecutableDao.getJobIds(ExecutableDao.java:135)
>> at
>> org.apache.kylin.job.manager.ExecutableManager.getAllJobIds(ExecutableManager.java:204)
>> at
>> org.apache.kylin.job.impl.threadpool.DefaultScheduler$FetcherRunner.run(DefaultScheduler.java:81)
>> at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>> at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
>> at
>> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
>> at
>> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
>> at
>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>> at
>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>> at java.lang.Thread.run(Thread.java:745)
>> Caused by: java.net.ConnectException: Connection refused
>> at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
>> at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
>> at
>> org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
>> at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
>> at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupConnection(RpcClientImpl.java:416)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupIOstreams(RpcClientImpl.java:722)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.writeRequest(RpcClientImpl.java:906)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.tracedWriteRequest(RpcClientImpl.java:873)
>> at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1242)
>> at
>> org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:226)
>> at
>> org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:331)
>> at
>> org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.get(ClientProtos.java:34070)
>> at
>> org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRowOrBefore(ProtobufUtil.java:1582)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1398)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1199)
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.getRegionLocations(RpcRetryingCallerWithReadReplicas.java:305)
>> ... 21 more
>> 2016-07-12 08:11:03,842 ERROR [pool-5-thread-1]
>> manager.ExecutableManager:206 : error get All Job Ids
>> org.apache.kylin.job.exception.PersistentException:
>> org.apache.hadoop.hbase.client.RetriesExhaustedException: Can't get the
>> location
>> at
>> org.apache.kylin.job.dao.ExecutableDao.getJobIds(ExecutableDao.java:146)
>> at
>> org.apache.kylin.job.manager.ExecutableManager.getAllJobIds(ExecutableManager.java:204)
>> at
>> org.apache.kylin.job.impl.threadpool.DefaultScheduler$FetcherRunner.run(DefaultScheduler.java:81)
>> at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>> at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
>> at
>> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
>> at
>> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
>> at
>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>> at
>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>> at java.lang.Thread.run(Thread.java:745)
>> Caused by: org.apache.hadoop.hbase.client.RetriesExhaustedException:
>> Can't get the location
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.getRegionLocations(RpcRetryingCallerWithReadReplicas.java:316)
>> at
>> org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:156)
>> at
>> org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:60)
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.call(ClientScanner.java:320)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:295)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:160)
>> at
>> org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:155)
>> at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:867)
>> at
>> org.apache.kylin.storage.hbase.HBaseResourceStore.visitFolder(HBaseResourceStore.java:137)
>> at
>> org.apache.kylin.storage.hbase.HBaseResourceStore.listResourcesImpl(HBaseResourceStore.java:107)
>> at
>> org.apache.kylin.common.persistence.ResourceStore.listResources(ResourceStore.java:123)
>> at
>> org.apache.kylin.job.dao.ExecutableDao.getJobIds(ExecutableDao.java:135)
>> ... 9 more
>> Caused by: java.net.ConnectException: Connection refused
>> at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
>> at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
>> at
>> org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
>> at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
>> at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupConnection(RpcClientImpl.java:416)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupIOstreams(RpcClientImpl.java:722)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.writeRequest(RpcClientImpl.java:906)
>> at
>> org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.tracedWriteRequest(RpcClientImpl.java:873)
>> at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1242)
>> at
>> org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:226)
>> at
>> org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:331)
>> at
>> org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.get(ClientProtos.java:34070)
>> at
>> org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRowOrBefore(ProtobufUtil.java:1582)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1398)
>> at
>> org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1199)
>> at
>> org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.getRegionLocations(RpcRetryingCallerWithReadReplicas.java:305)
>> ... 21 more
>> 2016-07-12 08:11:03,843 WARN  [pool-5-thread-1]
>> threadpool.DefaultScheduler:108 : Job Fetcher caught a exception
>> java.lang.RuntimeException:
>> org.apache.kylin.job.exception.PersistentException:
>> org.apache.hadoop.hbase.client.RetriesExhaustedException: Can't get the
>> location
>> 2016-07-12 08:11:18,165 ERROR [http-bio-7070-exec-7]
>> controller.QueryController:209 : Exception when execute sql
>> java.sql.SQLException: Error while executing SQL "select * from
>> CDR_AGGREGATION
>> LIMIT 50000": Timeout visiting cube!
>> at org.apache.calcite.avatica.Helper.createException(Helper.java:56)
>> at org.apache.calcite.avatica.Helper.createException(Helper.java:41)
>> at
>> org.apache.calcite.avatica.AvaticaStatement.executeInternal(AvaticaStatement.java:143)
>> at
>> org.apache.calcite.avatica.AvaticaStatement.executeQuery(AvaticaStatement.java:186)
>> at
>> org.apache.kylin.rest.service.QueryService.execute(QueryService.java:361)
>> at
>> org.apache.kylin.rest.service.QueryService.queryWithSqlMassage(QueryService.java:273)
>> at org.apache.kylin.rest.service.QueryService.query(QueryService.java:121)
>> at
>> org.apache.kylin.rest.service.QueryService$$FastClassByCGLIB$$4957273f.invoke(<generated>)
>> at net.sf.cglib.proxy.MethodProxy.invoke(MethodProxy.java:204)
>> at
>> org.springframework.aop.framework.Cglib2AopProxy$DynamicAdvisedInterceptor.intercept(Cglib2AopProxy.java:618)
>> at
>> org.apache.kylin.rest.service.QueryService$$EnhancerByCGLIB$$2b43fc30.query(<generated>)
>> at
>> org.apache.kylin.rest.controller.QueryController.doQueryWithCache(QueryController.java:192)
>> at
>> org.apache.kylin.rest.controller.QueryController.query(QueryController.java:94)
>> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>> at
>> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>> at
>> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>> at java.lang.reflect.Method.invoke(Method.java:497)
>> at
>> org.springframework.web.method.support.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:213)
>> at
>> org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:126)
>> at
>> org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:96)
>> at
>> org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:617)
>> at
>> org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:578)
>> at
>> org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:80)
>> at
>> org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:923)
>> at
>> org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:852)
>> at
>> org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:882)
>> at
>> org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:789)
>> at javax.servlet.http.HttpServlet.service(HttpServlet.java:646)
>> at javax.servlet.http.HttpServlet.service(HttpServlet.java:727)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:303)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
>> at org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:52)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:241)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:330)
>> at
>> org.springframework.security.web.access.intercept.FilterSecurityInterceptor.invoke(FilterSecurityInterceptor.java:118)
>> at
>> org.springframework.security.web.access.intercept.FilterSecurityInterceptor.doFilter(FilterSecurityInterceptor.java:84)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.access.ExceptionTranslationFilter.doFilter(ExceptionTranslationFilter.java:113)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.session.SessionManagementFilter.doFilter(SessionManagementFilter.java:103)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.authentication.AnonymousAuthenticationFilter.doFilter(AnonymousAuthenticationFilter.java:113)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.servletapi.SecurityContextHolderAwareRequestFilter.doFilter(SecurityContextHolderAwareRequestFilter.java:54)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.savedrequest.RequestCacheAwareFilter.doFilter(RequestCacheAwareFilter.java:45)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.authentication.www.BasicAuthenticationFilter.doFilter(BasicAuthenticationFilter.java:201)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.authentication.ui.DefaultLoginPageGeneratingFilter.doFilter(DefaultLoginPageGeneratingFilter.java:91)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.authentication.AbstractAuthenticationProcessingFilter.doFilter(AbstractAuthenticationProcessingFilter.java:183)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.authentication.logout.LogoutFilter.doFilter(LogoutFilter.java:105)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.context.SecurityContextPersistenceFilter.doFilter(SecurityContextPersistenceFilter.java:87)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.FilterChainProxy.doFilterInternal(FilterChainProxy.java:192)
>> at
>> org.springframework.security.web.FilterChainProxy.doFilter(FilterChainProxy.java:160)
>> at
>> org.springframework.web.filter.DelegatingFilterProxy.invokeDelegate(DelegatingFilterProxy.java:346)
>> at
>> org.springframework.web.filter.DelegatingFilterProxy.doFilter(DelegatingFilterProxy.java:259)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:241)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
>> at com.thetransactioncompany.cors.CORSFilter.doFilter(CORSFilter.java:195)
>> at com.thetransactioncompany.cors.CORSFilter.doFilter(CORSFilter.java:266)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:241)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
>> at
>> org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:220)
>> at
>> org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:122)
>> at
>> org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:504)
>> at
>> org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:170)
>> at
>> org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:103)
>> at
>> org.apache.catalina.valves.AccessLogValve.invoke(AccessLogValve.java:950)
>> at
>> org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:116)
>> at
>> org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:421)
>> at
>> org.apache.coyote.http11.AbstractHttp11Processor.process(AbstractHttp11Processor.java:1074)
>> at
>> org.apache.coyote.AbstractProtocol$AbstractConnectionHandler.process(AbstractProtocol.java:611)
>> at
>> org.apache.tomcat.util.net.JIoEndpoint$SocketProcessor.run(JIoEndpoint.java:314)
>> at
>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>> at
>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>> at
>> org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)
>> at java.lang.Thread.run(Thread.java:745)
>> Caused by: java.lang.RuntimeException: Timeout visiting cube!
>> at
>> org.apache.kylin.storage.hbase.cube.v2.CubeHBaseEndpointRPC$ExpectedSizeIterator.next(CubeHBaseEndpointRPC.java:127)
>> at
>> org.apache.kylin.storage.hbase.cube.v2.CubeHBaseEndpointRPC$ExpectedSizeIterator.next(CubeHBaseEndpointRPC.java:81)
>> at
>> com.google.common.collect.TransformedIterator.next(TransformedIterator.java:48)
>> at com.google.common.collect.Iterators$6.hasNext(Iterators.java:583)
>> at
>> org.apache.kylin.storage.hbase.cube.v2.SequentialCubeTupleIterator.hasNext(SequentialCubeTupleIterator.java:96)
>> at
>> org.apache.kylin.query.enumerator.OLAPEnumerator.moveNext(OLAPEnumerator.java:74)
>> at
>> org.apache.calcite.linq4j.EnumerableDefaults$TakeWhileEnumerator.moveNext(EnumerableDefaults.java:2818)
>> at
>> org.apache.calcite.linq4j.Linq4j$EnumeratorIterator.<init>(Linq4j.java:664)
>> at org.apache.calcite.linq4j.Linq4j.enumeratorIterator(Linq4j.java:98)
>> at
>> org.apache.calcite.linq4j.AbstractEnumerable.iterator(AbstractEnumerable.java:33)
>> at org.apache.calcite.avatica.MetaImpl.createCursor(MetaImpl.java:85)
>> at
>> org.apache.calcite.avatica.AvaticaResultSet.execute(AvaticaResultSet.java:190)
>> at
>> org.apache.calcite.jdbc.CalciteResultSet.execute(CalciteResultSet.java:65)
>> at
>> org.apache.calcite.jdbc.CalciteResultSet.execute(CalciteResultSet.java:44)
>> at
>> org.apache.calcite.avatica.AvaticaConnection$1.execute(AvaticaConnection.java:566)
>> at
>> org.apache.calcite.jdbc.CalciteMetaImpl.prepareAndExecute(CalciteMetaImpl.java:578)
>> at
>> org.apache.calcite.avatica.AvaticaConnection.prepareAndExecuteInternal(AvaticaConnection.java:571)
>> at
>> org.apache.calcite.avatica.AvaticaStatement.executeInternal(AvaticaStatement.java:135)
>> ... 80 more
>> 2016-07-12 08:11:18,173 INFO  [http-bio-7070-exec-7]
>> service.QueryService:250 :
>> ==========================[QUERY]===============================
>> SQL: select * from CDR_AGGREGATION
>> User: ADMIN
>> Success: false
>> Duration: 0.0
>> Project: CDR_Demo_Project
>> Realization Names: [CDR_Demo_Cube_1]
>> Cuboid Ids: [15]
>> Total scan count: 0
>> Result row count: 0
>> Accept Partial: true
>> Is Partial Result: false
>> Hit Exception Cache: false
>> Storage cache used: false
>> Message: Error while executing SQL "select * from CDR_AGGREGATION LIMIT
>> 50000": Timeout visiting cube!
>> ==========================[QUERY]===============================
>>
>> 2016-07-12 08:11:18,174 ERROR [http-bio-7070-exec-7]
>> controller.BasicController:44 :
>> org.apache.kylin.rest.exception.InternalErrorException: Error while
>> executing SQL "select * from CDR_AGGREGATION LIMIT 50000": Timeout visiting
>> cube!
>> at
>> org.apache.kylin.rest.controller.QueryController.doQueryWithCache(QueryController.java:224)
>> at
>> org.apache.kylin.rest.controller.QueryController.query(QueryController.java:94)
>> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>> at
>> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>> at
>> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>> at java.lang.reflect.Method.invoke(Method.java:497)
>> at
>> org.springframework.web.method.support.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:213)
>> at
>> org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:126)
>> at
>> org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:96)
>> at
>> org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:617)
>> at
>> org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:578)
>> at
>> org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:80)
>> at
>> org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:923)
>> at
>> org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:852)
>> at
>> org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:882)
>> at
>> org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:789)
>> at javax.servlet.http.HttpServlet.service(HttpServlet.java:646)
>> at javax.servlet.http.HttpServlet.service(HttpServlet.java:727)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:303)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
>> at org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:52)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:241)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:330)
>> at
>> org.springframework.security.web.access.intercept.FilterSecurityInterceptor.invoke(FilterSecurityInterceptor.java:118)
>> at
>> org.springframework.security.web.access.intercept.FilterSecurityInterceptor.doFilter(FilterSecurityInterceptor.java:84)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.access.ExceptionTranslationFilter.doFilter(ExceptionTranslationFilter.java:113)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.session.SessionManagementFilter.doFilter(SessionManagementFilter.java:103)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.authentication.AnonymousAuthenticationFilter.doFilter(AnonymousAuthenticationFilter.java:113)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.servletapi.SecurityContextHolderAwareRequestFilter.doFilter(SecurityContextHolderAwareRequestFilter.java:54)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.savedrequest.RequestCacheAwareFilter.doFilter(RequestCacheAwareFilter.java:45)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.authentication.www.BasicAuthenticationFilter.doFilter(BasicAuthenticationFilter.java:201)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.authentication.ui.DefaultLoginPageGeneratingFilter.doFilter(DefaultLoginPageGeneratingFilter.java:91)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.authentication.AbstractAuthenticationProcessingFilter.doFilter(AbstractAuthenticationProcessingFilter.java:183)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.authentication.logout.LogoutFilter.doFilter(LogoutFilter.java:105)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.context.SecurityContextPersistenceFilter.doFilter(SecurityContextPersistenceFilter.java:87)
>> at
>> org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:342)
>> at
>> org.springframework.security.web.FilterChainProxy.doFilterInternal(FilterChainProxy.java:192)
>> at
>> org.springframework.security.web.FilterChainProxy.doFilter(FilterChainProxy.java:160)
>> at
>> org.springframework.web.filter.DelegatingFilterProxy.invokeDelegate(DelegatingFilterProxy.java:346)
>> at
>> org.springframework.web.filter.DelegatingFilterProxy.doFilter(DelegatingFilterProxy.java:259)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:241)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
>> at com.thetransactioncompany.cors.CORSFilter.doFilter(CORSFilter.java:195)
>> at com.thetransactioncompany.cors.CORSFilter.doFilter(CORSFilter.java:266)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:241)
>> at
>> org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
>> at
>> org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:220)
>> at
>> org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:122)
>> at
>> org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:504)
>> at
>> org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:170)
>> at
>> org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:103)
>> at
>> org.apache.catalina.valves.AccessLogValve.invoke(AccessLogValve.java:950)
>> at
>> org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:116)
>> at
>> org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:421)
>> at
>> org.apache.coyote.http11.AbstractHttp11Processor.process(AbstractHttp11Processor.java:1074)
>> at
>> org.apache.coyote.AbstractProtocol$AbstractConnectionHandler.process(AbstractProtocol.java:611)
>> at
>> org.apache.tomcat.util.net.JIoEndpoint$SocketProcessor.run(JIoEndpoint.java:314)
>> at
>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>> at
>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>> at
>> org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)
>> at java.lang.Thread.run(Thread.java:745)
>>
>>
>>
>>
>
