asterixdb-dev mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From "Ian Maxon (Code Review)" <do-not-re...@asterix-gerrit.ics.uci.edu>
Subject Change in asterixdb[master]: whitespace fix for commit b361b06
Date Thu, 20 Aug 2015 04:26:03 GMT
Ian Maxon has submitted this change and it was merged.

Change subject: whitespace fix for commit b361b06
......................................................................


whitespace fix for commit b361b06

Change-Id: I9b01a2cacb2a8efe0a4c0bb97436a9c59edb8e42
Reviewed-on: https://asterix-gerrit.ics.uci.edu/357
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Ian Maxon <imaxon@apache.org>
---
M asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
M asterix-doc/src/site/markdown/feeds/tutorial.md
M asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PushBasedTwitterAdapterFactory.java
M asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/TwitterUtil.java
4 files changed, 129 insertions(+), 130 deletions(-)

Approvals:
  Ian Maxon: Looks good to me, approved
  Jenkins: Verified



diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index 5030c0d..93917cc 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -2120,8 +2120,8 @@
             // All Metadata checks have passed. Feed connect request is valid. //
 
             FeedPolicyAccessor policyAccessor = new FeedPolicyAccessor(feedPolicy.getProperties());
-            Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>> triple =
getFeedConnectionRequest(dataverseName, feed,
-                    cbfs.getDatasetName(), feedPolicy, mdTxnCtx);
+            Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>> triple =
getFeedConnectionRequest(dataverseName,
+                    feed, cbfs.getDatasetName(), feedPolicy, mdTxnCtx);
             FeedConnectionRequest connectionRequest = triple.first;
             boolean createFeedIntakeJob = triple.second;
 
@@ -2135,8 +2135,8 @@
                         metadataProvider, policyAccessor);
                 // adapter configuration are valid at this stage
                 // register the feed joints (these are auto-de-registered)
-                for (IFeedJoint fj : triple.third){
-                    FeedLifecycleListener.INSTANCE.registerFeedJoint(fj);   
+                for (IFeedJoint fj : triple.third) {
+                    FeedLifecycleListener.INSTANCE.registerFeedJoint(fj);
                 }
                 runJob(hcc, pair.first, false);
                 IFeedAdapterFactory adapterFactory = pair.second;
@@ -2146,8 +2146,8 @@
                 }
                 eventSubscriber.assertEvent(FeedLifecycleEvent.FEED_INTAKE_STARTED);
             } else {
-                for (IFeedJoint fj : triple.third){
-                    FeedLifecycleListener.INSTANCE.registerFeedJoint(fj);   
+                for (IFeedJoint fj : triple.third) {
+                    FeedLifecycleListener.INSTANCE.registerFeedJoint(fj);
                 }
             }
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -2193,8 +2193,9 @@
      * @return
      * @throws MetadataException
      */
-    private Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>> getFeedConnectionRequest(String
dataverse, Feed feed, String dataset,
-            FeedPolicy feedPolicy, MetadataTransactionContext mdTxnCtx) throws MetadataException
{
+    private Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>> getFeedConnectionRequest(String
dataverse,
+            Feed feed, String dataset, FeedPolicy feedPolicy, MetadataTransactionContext
mdTxnCtx)
+            throws MetadataException {
         IFeedJoint sourceFeedJoint = null;
         FeedConnectionRequest request = null;
         List<String> functionsToApply = new ArrayList<String>();
diff --git a/asterix-doc/src/site/markdown/feeds/tutorial.md b/asterix-doc/src/site/markdown/feeds/tutorial.md
index 009e2b1..c6ad73c 100644
--- a/asterix-doc/src/site/markdown/feeds/tutorial.md
+++ b/asterix-doc/src/site/markdown/feeds/tutorial.md
@@ -34,14 +34,14 @@
 AsterixDB currently provides built-in adaptors for several popular
 data sources—Twitter, CNN, and RSS feeds. AsterixDB additionally
 provides a generic socket-based adaptor that can be used
-to ingest data that is directed at a prescribed socket. 
+to ingest data that is directed at a prescribed socket.
 
 
 In this tutorial, we shall describe building two example data ingestion pipelines that cover
the popular scenario of ingesting data from (a) Twitter and (b) RSS Feed source.
 
-####Ingesting Twitter Stream 
+####Ingesting Twitter Stream
 We shall use the built-in push-based Twitter adaptor.
-As a pre-requisite, we must define a Tweet using the AsterixDB Data Model (ADM) and the AsterixDB
Query Language (AQL). Given below are the type definition in AQL that create a Tweet datatype
which is representative of a real tweet as obtained from Twitter.  
+As a pre-requisite, we must define a Tweet using the AsterixDB Data Model (ADM) and the AsterixDB
Query Language (AQL). Given below is the type definition in AQL that creates a Tweet datatype
which is representative of a real tweet as obtained from Twitter.
 
         create dataverse feeds;
         use dataverse feeds;
@@ -63,15 +63,15 @@
             message_text:string
         };
 
-	    create dataset Tweets (Tweet)
+        create dataset Tweets (Tweet)
         primary key id;
 
-We also create a dataset that we shall use to persist the tweets in AsterixDB. 
-Next we make use of the create feed AQL statement to define our example data feed. 
+We also create a dataset that we shall use to persist the tweets in AsterixDB.
+Next we make use of the create feed AQL statement to define our example data feed.
 
 #####Using the "push_twitter" feed adapter#####
 The push_twitter adaptor requires setting up an application account with Twitter. To retrieve
-tweets, Twitter requires registering an application with Twitter. Registration involves providing
a name and a brief description for the application. Each application has an associated OAuth
authentication credential that includes OAuth keys and tokens. Accessing the 
+tweets, Twitter requires registering an application with Twitter. Registration involves providing
a name and a brief description for the application. Each application has an associated OAuth
authentication credential that includes OAuth keys and tokens. Accessing the
 Twitter API requires providing the following.
 1. Consumer Key (API Key)
 2. Consumer Secret (API Secret)
@@ -79,60 +79,60 @@
 4. Access Token Secret
 
 
-The "push_twitter" adaptor takes as configuration the above mentioned parameters. End-user(s)
are required to obtain the above authentication credentials prior to using the "push_twitter"
adaptor. For further information on obtaining OAuth keys and tokens and registering an application
with Twitter, please visit http://apps.twitter.com 
+The "push_twitter" adaptor takes as configuration the above mentioned parameters. End-user(s)
are required to obtain the above authentication credentials prior to using the "push_twitter"
adaptor. For further information on obtaining OAuth keys and tokens and registering an application
with Twitter, please visit http://apps.twitter.com
 
-Given below is an example AQL statement that creates a feed - TwitterFeed by using the 
-"push_twitter" adaptor. 
+Given below is an example AQL statement that creates a feed - TwitterFeed by using the
+"push_twitter" adaptor.
 
 
 
         create feed TwitterFeed if not exists using "push_twitter"
         (("type-name"="Tweet"),
-         ("consumer.key"="************"),  
+         ("consumer.key"="************"),
          ("consumer.secret"="**************"),
-         ("access.token"="**********"),  
+         ("access.token"="**********"),
          ("access.token.secret"="*************"));
 
-It is required that the above authentication parameters are provided valid values. 
+It is required that the above authentication parameters are provided valid values.
 Note that the create feed statement does not initiate the flow of data from Twitter into
our AsterixDB instance. Instead, the create feed statement only results in registering the
feed with AsterixDB. The flow of data along a feed is initiated when it is connected
 to a target dataset using the connect feed statement (which we shall revisit later).
 
 
 ####Ingesting an RSS Feed
-RSS (Rich Site Summary); originally RDF Site Summary; often called Really Simple Syndication,
uses a family of standard web feed formats to publish frequently updated information: blog
entries, news headlines, audio, video. An RSS document (called "feed", "web feed", or "channel")
includes full or summarized text, and metadata, like publishing date and author's name. RSS
feeds enable publishers to syndicate data automatically. 
+RSS (Rich Site Summary); originally RDF Site Summary; often called Really Simple Syndication,
uses a family of standard web feed formats to publish frequently updated information: blog
entries, news headlines, audio, video. An RSS document (called "feed", "web feed", or "channel")
includes full or summarized text, and metadata, like publishing date and author's name. RSS
feeds enable publishers to syndicate data automatically.
 
 
 #####Using the "rss_feed" feed adapter#####
-AsterixDB provides a built-in feed adaptor that allows retrieving data given a collection
of RSS end point URLs. As observed in the case of ingesting tweets, it is required to model
an RSS data item using AQL.  
+AsterixDB provides a built-in feed adaptor that allows retrieving data given a collection
of RSS end point URLs. As observed in the case of ingesting tweets, it is required to model
an RSS data item using AQL.
 
         create dataverse feeds if not exists;
         use dataverse feeds;
 
         create type Rss  if not exists as open{
-        	id: string,
-        	title: string,
-        	description: string,
-        	link: string
+            id: string,
+            title: string,
+            description: string,
+            link: string
         };
 
         create dataset RssDataset (Rss)
-		primary key id; 
-		
-
-Next, we define an RSS feed using our built-in adaptor "rss_feed". 
-
-        create feed my_feed using 
-	    rss_feed (
-	       ("type-name"="Rss"),
-	       ("url"="http://rss.cnn.com/rss/edition.rss")
-		);
-
-In the above definition, the configuration parameter "url" can be a comma separated list
that reflects a collection of RSS URLs, where each URL corresponds to an RSS endpoint or a
RSS feed. 
-The "rss_adaptor" retrieves data from each of the specified RSS URLs (comma separated values)
in parallel. 
+        primary key id;
 
 
-So far, we have discussed the mechanism for retrieving data from the external world into
the AsterixDB system. However, the arriving data may require certain pre-processing prior
to being persisted in AsterixDB storage. Next, we discuss how the arriving data can be pre-processed.

-         
+Next, we define an RSS feed using our built-in adaptor "rss_feed".
+
+        create feed my_feed using
+        rss_feed (
+           ("type-name"="Rss"),
+           ("url"="http://rss.cnn.com/rss/edition.rss")
+        );
+
+In the above definition, the configuration parameter "url" can be a comma separated list
that reflects a collection of RSS URLs, where each URL corresponds to an RSS endpoint or a
RSS feed.
+The "rss_adaptor" retrieves data from each of the specified RSS URLs (comma separated values)
in parallel.
+
+
+So far, we have discussed the mechanism for retrieving data from the external world into
the AsterixDB system. However, the arriving data may require certain pre-processing prior
to being persisted in AsterixDB storage. Next, we discuss how the arriving data can be pre-processed.
+
 
 
 ## <a id="PreprocessingCollectedData">Preprocessing Collected Data</a> ###
@@ -154,7 +154,7 @@
 reason about an AQL UDF and involve the use of indexes during
 its invocation.
 
-We consider an example transformation of a raw tweet into its lightweight version - ProcessedTweet
- which is defined next. 
+We consider an example transformation of a raw tweet into its lightweight version - ProcessedTweet
- which is defined next.
 
         create type ProcessedTweet if not exists as open {
             id: string,
@@ -165,11 +165,11 @@
             country: string,
             topics: [string]
         };
-        
-        
+
+
 The processing required in transforming a collected tweet to its lighter version (of type
ProcessedTweet) involves extracting the topics or hash-tags (if any) in a tweet
 and collecting them in the referred-topics attribute for the tweet.
-Additionally, the latitude and longitude values (doubles) are combined into the spatial point
type. Note that spatial data types are considered as first class citizens that come with the
support for creating indexes. Next we show a revised version of our example TwitterFeed that
involves the use of a UDF. We assume that the UDF that contains the transformation logic into
a ProcessedTweet is avaialable as a Java UDF inside an AsterixDB library named 'testlib'.
We defer the writing of a Java UDF and its installation as part of an AsterixDB library to
a later section of this document. 
+Additionally, the latitude and longitude values (doubles) are combined into the spatial point
type. Note that spatial data types are considered as first class citizens that come with the
support for creating indexes. Next we show a revised version of our example TwitterFeed that
involves the use of a UDF. We assume that the UDF that contains the transformation logic into
a ProcessedTweet is available as a Java UDF inside an AsterixDB library named 'testlib'.
We defer the writing of a Java UDF and its installation as part of an AsterixDB library to
a later section of this document.
 
         create feed ProcessedTwitterFeed if not exists
         using "push_twitter"
@@ -201,12 +201,12 @@
 have an associated UDF to allow for any subsequent processing,
 can be persisted into a dataset, and/or can be made to derive other
 secondary feeds to form a cascade network. A primary feed and a
-dependent secondary feed form a hierarchy. As an example, we next show an 
+dependent secondary feed form a hierarchy. As an example, we next show an
 example AQL statement that redefines the previous feed—
 ProcessedTwitterFeed in terms of their
 respective parent feed (TwitterFeed).
 
-        create secondary feed ProcessedTwitterFeed from feed TwitterFeed 
+        create secondary feed ProcessedTwitterFeed from feed TwitterFeed
         apply function testlib#addFeatures;
 
 
@@ -242,7 +242,7 @@
 to do so, the end user makes another use of the connect feed statement.
 A logical view of the continuous flow of data established by
 connecting the feeds to their respective target datasets is shown in
-Figure 8. 
+Figure 8.
 
 The flow of data from a feed into a dataset can be terminated
 explicitly by use of the disconnect feed statement.
@@ -278,20 +278,20 @@
 
 
 
-####Policy Parameters 
+####Policy Parameters
 
 
 - *excess.records.spill*: Set to true if records that cannot be processed by an operator
for lack of resources (referred to as excess records hereafter) should be persisted to the
local disk for deferred processing. (Default: false)
 
-- *excess.records.discard*: Set to true if excess records should be discarded. (Default:
false) 
+- *excess.records.discard*: Set to true if excess records should be discarded. (Default:
false)
 
-- *excess.records.throttle*: Set to true if rate of arrival of records is required to be
reduced in an adaptive manner to prevent having any excess records (Default: false) 
+- *excess.records.throttle*: Set to true if rate of arrival of records is required to be
reduced in an adaptive manner to prevent having any excess records (Default: false)
 
-- *excess.records.elastic*: Set to true if the system should attempt to resolve resource
bottlenecks by re-structuring and/or rescheduling the feed ingestion pipeline. (Default: false)

+- *excess.records.elastic*: Set to true if the system should attempt to resolve resource
bottlenecks by re-structuring and/or rescheduling the feed ingestion pipeline. (Default: false)
 
-- *recover.soft.failure*:  Set to true if the feed must attempt to survive any runtime exception.
A false value permits an early termination of a feed in such an event. (Default: true) 
+- *recover.soft.failure*:  Set to true if the feed must attempt to survive any runtime exception.
A false value permits an early termination of a feed in such an event. (Default: true)
 
-- *recover.soft.failure*:  Set to true if the feed must attempt to survive a hardware failures
(loss of AsterixDB node(s)). A false value permits the early termination of a feed in the
event of a hardware failure (Default: false) 
+- *recover.hard.failure*:  Set to true if the feed must attempt to survive hardware failures
(loss of AsterixDB node(s)). A false value permits the early termination of a feed in the
event of a hardware failure (Default: false)
 
 Note that the end user may choose to form a custom policy. E.g.
 it is possible in AsterixDB to create a custom policy that spills excess
@@ -364,40 +364,40 @@
 ## <a id="CreatingAnAsterixDBLibrary">Creating an AsterixDB Library</a> ###
 
 We need to install our Java UDF so that we may use it in AQL statements/queries. An AsterixDB
library has a pre-defined structure which is as follows.
-	
 
-- jar file: A jar file that would contain the class files for your UDF source code. 
+
+- jar file: A jar file that would contain the class files for your UDF source code.
 - library descriptor.xml:  This is a descriptor that provide meta-information about the library.
 
-	    <externalLibrary xmlns="library">
-    		<language>JAVA</language>
-    		<libraryFunctions>
-    			<libraryFunction>
-    				<function_type>SCALAR</function_type>
-    				<name>addFeatures</name>
-    				<arguments>Tweet</arguments>
-    				<return_type>ProcessedTweet</return_type>
-    				<definition>edu.uci.ics.asterix.external.library.AddHashTagsFactory
-    				</definition>
-    			</libraryFunction>
-    		</libraryFunctions>
-    	</externalLibrary>
+        <externalLibrary xmlns="library">
+            <language>JAVA</language>
+            <libraryFunctions>
+                <libraryFunction>
+                    <function_type>SCALAR</function_type>
+                    <name>addFeatures</name>
+                    <arguments>Tweet</arguments>
+                    <return_type>ProcessedTweet</return_type>
+                    <definition>edu.uci.ics.asterix.external.library.AddHashTagsFactory
+                    </definition>
+                </libraryFunction>
+            </libraryFunctions>
+        </externalLibrary>
 
 
 - lib: other dependency jars
 
-If the Java UDF requires additional dependency jars, you may add them under a "lib" folder
is required. 
+If the Java UDF requires additional dependency jars, you may add them under a "lib" folder.
 
 We create a zip bundle that contains the jar file and the library descriptor xml file. The
zip would have the following structure.
 
-	$ unzip -l ./tweetlib.zip 
-	Archive:  ./tweetlib.zip
-  	Length     Date   Time    Name
- 	--------    ----   ----    ----
-   	760817  04-23-14 17:16   hash-tags.jar
-    405     04-23-14 17:16   tweet.xml
- 	--------                   -------
-   	761222                   2 files
+    $ unzip -l ./tweetlib.zip
+    Archive:  ./tweetlib.zip
+      Length     Date   Time    Name
+     --------    ----   ----    ----
+       760817  04-23-14 17:16   hash-tags.jar
+          405  04-23-14 17:16   tweet.xml
+     --------                   -------
+       761222                   2 files
 
 
 ###Installing an AsterixDB Library###
@@ -407,41 +407,41 @@
 
 - Step 1: Stop the AsterixDB instance if it is in the ACTIVE state.
 
-   		$ managix stop -n my_asterix
-    
+        $ managix stop -n my_asterix
+
 
 - Step 2: Install the library using Managix install command. Just to illustrate, we use the
help command to look up the syntax
 
-	    $ managix help  -cmd install
-    	Installs a library to an asterix instance.
-    	Options
-    	n  Name of Asterix Instance
-    	d  Name of the dataverse under which the library will be installed
-    	l  Name of the library
-    	p  Path to library zip bundle
-	
+        $ managix help  -cmd install
+        Installs a library to an asterix instance.
+        Options
+        n  Name of Asterix Instance
+        d  Name of the dataverse under which the library will be installed
+        l  Name of the library
+        p  Path to library zip bundle
+
 
 Above is a sample output and explains the usage and the required parameters. Each library
has a name and is installed under a dataverse. Recall that we had created a dataverse by the
name - "feeds" prior to  creating our datatypes and dataset. We shall name our library - "testlib".
 
 We assume you have a library zip bundle that needs to be installed.
 To install the library, use the Managix install command. An example is shown below.
 
-	$ managix install -n my_asterix -d feeds -l testlib -p <put the absolute path of the
library zip bundle here> 
+    $ managix install -n my_asterix -d feeds -l testlib -p <put the absolute path of the
library zip bundle here>
 
 You should see the following message:
 
-	INFO: Installed library testlib
+    INFO: Installed library testlib
 
 We shall next start our AsterixDB instance using the start command as shown below.
 
-	$ managix start -n my_asterix
+    $ managix start -n my_asterix
 
 You may now use the AsterixDB library in AQL statements and queries. To look at the installed
artifacts, you may execute the following query at the AsterixDB web-console.
 
-	for $x in dataset Metadata.Function 
-	return $x
+    for $x in dataset Metadata.Function
+    return $x
 
-	for $x in dataset Metadata.Library	
-	return $x
+    for $x in dataset Metadata.Library
+    return $x
 
-Our library is now installed and is ready to be used.  
\ No newline at end of file
+Our library is now installed and is ready to be used.
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PushBasedTwitterAdapterFactory.java
b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PushBasedTwitterAdapterFactory.java
index f77efbf..778b7bf 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PushBasedTwitterAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PushBasedTwitterAdapterFactory.java
@@ -64,14 +64,14 @@
         this.configuration = configuration;
         TwitterUtil.initializeConfigurationWithAuthInfo(configuration);
         boolean requiredParamsSpecified = validateConfiguration(configuration);
-        if(!requiredParamsSpecified){
-           StringBuilder builder = new StringBuilder();
-           builder.append("One or more parameters are missing from adapter configuration\n");
-           builder.append(AuthenticationConstants.OAUTH_CONSUMER_KEY + "\n");
-           builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET + "\n");
-           builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN + "\n");
-           builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET + "\n");
-           throw new Exception(builder.toString());
+        if (!requiredParamsSpecified) {
+            StringBuilder builder = new StringBuilder();
+            builder.append("One or more parameters are missing from adapter configuration\n");
+            builder.append(AuthenticationConstants.OAUTH_CONSUMER_KEY + "\n");
+            builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET + "\n");
+            builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN + "\n");
+            builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET + "\n");
+            throw new Exception(builder.toString());
         }
     }
 
@@ -79,7 +79,6 @@
     public ARecordType getAdapterOutputType() {
         return outputType;
     }
-    
 
     @Override
     public boolean isRecordTrackingEnabled() {
@@ -92,12 +91,12 @@
     }
 
     private boolean validateConfiguration(Map<String, String> configuration) {
-        String consumerKey  = configuration.get(AuthenticationConstants.OAUTH_CONSUMER_KEY);
-        String consumerSecret  = configuration.get(AuthenticationConstants.OAUTH_CONSUMER_SECRET);
-        String accessToken  = configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN);
-        String tokenSecret  = configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET);
-        
-        if(consumerKey == null  || consumerSecret == null || accessToken == null || tokenSecret
== null){
+        String consumerKey = configuration.get(AuthenticationConstants.OAUTH_CONSUMER_KEY);
+        String consumerSecret = configuration.get(AuthenticationConstants.OAUTH_CONSUMER_SECRET);
+        String accessToken = configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN);
+        String tokenSecret = configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET);
+
+        if (consumerKey == null || consumerSecret == null || accessToken == null || tokenSecret
== null) {
             return false;
         }
         return true;
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/TwitterUtil.java
b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/TwitterUtil.java
index 2737582..f9a9ccf 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/TwitterUtil.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/TwitterUtil.java
@@ -31,7 +31,6 @@
 
 public class TwitterUtil {
 
-
     private static Logger LOGGER = Logger.getLogger(TwitterUtil.class.getName());
 
     public static class ConfigurationConstants {
@@ -83,20 +82,20 @@
     public static Twitter getTwitterService(Map<String, String> configuration) {
         ConfigurationBuilder cb = getAuthConfiguration(configuration);
         TwitterFactory tf = null;
-        try{
-          tf = new TwitterFactory(cb.build());
-        } catch (Exception e){
-         if (LOGGER.isLoggable(Level.WARNING)){
-            StringBuilder builder = new StringBuilder();
-            builder.append("Twitter Adapter requires the following config parameters\n");
-            builder.append(AuthenticationConstants.OAUTH_CONSUMER_KEY + "\n");
-            builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET + "\n");
-            builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN + "\n");
-            builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET + "\n");
-            LOGGER.warning(builder.toString()); 
-            LOGGER.warning("Unable to configure Twitter adapter due to incomplete/incorrect
authentication credentials");
-            LOGGER.warning("For details on how to obtain OAuth authentication token, visit
https://dev.twitter.com/oauth/overview/application-owner-access-tokens");
-         }  
+        try {
+            tf = new TwitterFactory(cb.build());
+        } catch (Exception e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                StringBuilder builder = new StringBuilder();
+                builder.append("Twitter Adapter requires the following config parameters\n");
+                builder.append(AuthenticationConstants.OAUTH_CONSUMER_KEY + "\n");
+                builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET + "\n");
+                builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN + "\n");
+                builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET + "\n");
+                LOGGER.warning(builder.toString());
+                LOGGER.warning("Unable to configure Twitter adapter due to incomplete/incorrect
authentication credentials");
+                LOGGER.warning("For details on how to obtain OAuth authentication token,
visit https://dev.twitter.com/oauth/overview/application-owner-access-tokens");
+            }
         }
         Twitter twitter = tf.getInstance();
         return twitter;
@@ -152,9 +151,9 @@
                     break;
             }
         } catch (Exception e) {
-            if(LOGGER.isLoggable(Level.WARNING)){
-                LOGGER.warning("unable to load authentication credentials from auth.properties
file" + 
-             "credential information will be obtained from adapter's configuration");
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("unable to load authentication credentials from auth.properties
file"
+                        + "credential information will be obtained from adapter's configuration");
             }
         }
     }

-- 
To view, visit https://asterix-gerrit.ics.uci.edu/357
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: I9b01a2cacb2a8efe0a4c0bb97436a9c59edb8e42
Gerrit-PatchSet: 5
Gerrit-Project: asterixdb
Gerrit-Branch: master
Gerrit-Owner: Till Westmann <tillw@apache.org>
Gerrit-Reviewer: Ian Maxon <imaxon@apache.org>
Gerrit-Reviewer: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Gerrit-Reviewer: Raman Grover <ramangrover29@gmail.com>

Mime
View raw message