hadoop-mapreduce-commits mailing list archives

From: maha...@apache.org
Subject: svn commit: r1104635 - in /hadoop/mapreduce/branches/MR-279: ./ mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/ yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/ yarn/yarn-common/src/main/java/...
Date: Wed, 18 May 2011 00:06:52 GMT
Author: mahadev
Date: Wed May 18 00:06:51 2011
New Revision: 1104635

URL: http://svn.apache.org/viewvc?rev=1104635&view=rev
Log:
MAPREDUCE-2500. PB factories are not thread safe (siddharth seth via mahadev)

Modified:
    hadoop/mapreduce/branches/MR-279/CHANGES.txt
    hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/YarnRemoteExceptionFactoryPBImpl.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java
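
For readers skimming the diff below: each factory touched by this change is a process-wide singleton that memoizes reflective lookups, and before the patch those memo tables were plain HashMaps updated with an unsynchronized check-then-put. The patch switches the caches to ConcurrentHashMap with putIfAbsent and hoists per-call RecordFactoryProvider/Configuration lookups into static fields. A minimal, self-contained sketch of the before/after pattern (class and method names here are illustrative, not the actual YARN types):

// Illustration of the pre-patch hazard: a singleton memoizing reflective
// lookups in an unsynchronized HashMap. Concurrent callers can observe the
// map mid-resize, miss an entry that was just inserted, or (on the JDKs of
// the time) loop inside HashMap.get().
import java.lang.reflect.Constructor;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

class UnsafeFactory {
  private final Map<Class<?>, Constructor<?>> cache =
      new HashMap<Class<?>, Constructor<?>>();             // shared, unguarded

  Object newInstance(Class<?> clazz) throws Exception {
    Constructor<?> c = cache.get(clazz);                   // racy read
    if (c == null) {
      c = clazz.getConstructor();
      cache.put(clazz, c);                                  // racy write
    }
    return c.newInstance();
  }
}

class SafeFactory {
  private final ConcurrentMap<Class<?>, Constructor<?>> cache =
      new ConcurrentHashMap<Class<?>, Constructor<?>>();    // thread-safe map

  Object newInstance(Class<?> clazz) throws Exception {
    Constructor<?> c = cache.get(clazz);
    if (c == null) {
      c = clazz.getConstructor();
      cache.putIfAbsent(clazz, c);                          // losing thread keeps its own identical Constructor
    }
    return c.newInstance();
  }
}
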

Modified: hadoop/mapreduce/branches/MR-279/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/CHANGES.txt?rev=1104635&r1=1104634&r2=1104635&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/CHANGES.txt (original)
+++ hadoop/mapreduce/branches/MR-279/CHANGES.txt Wed May 18 00:06:51 2011
@@ -4,6 +4,8 @@ Trunk (unreleased changes)
 
   MAPREDUCE-279
 
+    MAPREDUCE-2500. PB factories are not thread safe (siddharth seth via mahadev) 
+
     Fix the tests to use jvm fork mode to avoid errors in shutting down
     services (sidharth seth)
 

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java?rev=1104635&r1=1104634&r2=1104635&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java Wed May 18 00:06:51 2011
@@ -24,8 +24,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.JobPriority;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
 import org.apache.hadoop.mapreduce.JobStatus.State;
@@ -49,10 +47,17 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.NodeManagerInfo;
 import org.apache.hadoop.yarn.api.records.QueueACL;
 import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
+import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 
 public class TypeConverter {
 
+  private static RecordFactory recordFactory;
+  
+  static {
+    recordFactory = RecordFactoryProvider.getRecordFactory(null);
+  }
+
   public static org.apache.hadoop.mapred.JobID fromYarn(JobId id) {
     String identifier = fromClusterTimeStamp(id.getAppId().getClusterTimestamp());
     return new org.apache.hadoop.mapred.JobID(identifier, id.getId());
@@ -65,10 +70,10 @@ public class TypeConverter {
   }
 
   public static JobId toYarn(org.apache.hadoop.mapreduce.JobID id) {
-    JobId jobId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class);
+    JobId jobId = recordFactory.newRecordInstance(JobId.class);
     jobId.setId(id.getId()); //currently there is 1-1 mapping between appid and jobid
     
-    ApplicationId appId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class);
+    ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
     appId.setId(id.getId());
     appId.setClusterTimestamp(toClusterTimeStamp(id.getJtIdentifier()));
     jobId.setAppId(appId);
@@ -113,7 +118,7 @@ public class TypeConverter {
   }
 
   public static TaskId toYarn(org.apache.hadoop.mapreduce.TaskID id) {
-    TaskId taskId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class);
+    TaskId taskId = recordFactory.newRecordInstance(TaskId.class);
     taskId.setId(id.getId());
     taskId.setTaskType(toYarn(id.getTaskType()));
     taskId.setJobId(toYarn(id.getJobID()));
@@ -184,7 +189,7 @@ public class TypeConverter {
 
   public static TaskAttemptId toYarn(
       org.apache.hadoop.mapred.TaskAttemptID id) {
-    TaskAttemptId taskAttemptId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class);
+    TaskAttemptId taskAttemptId = recordFactory.newRecordInstance(TaskAttemptId.class);
     taskAttemptId.setTaskId(toYarn(id.getTaskID()));
     taskAttemptId.setId(id.getId());
     return taskAttemptId;
@@ -192,7 +197,7 @@ public class TypeConverter {
 
   public static TaskAttemptId toYarn(
       org.apache.hadoop.mapreduce.TaskAttemptID id) {
-    TaskAttemptId taskAttemptId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class);
+    TaskAttemptId taskAttemptId = recordFactory.newRecordInstance(TaskAttemptId.class);
     taskAttemptId.setTaskId(toYarn(id.getTaskID()));
     taskAttemptId.setId(id.getId());
     return taskAttemptId;
@@ -214,15 +219,15 @@ public class TypeConverter {
   }
 
   public static Counters toYarn(org.apache.hadoop.mapred.Counters counters) {
-    Counters yCntrs = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Counters.class);
+    Counters yCntrs = recordFactory.newRecordInstance(Counters.class);
     yCntrs.addAllCounterGroups(new HashMap<String, CounterGroup>());
     for (org.apache.hadoop.mapred.Counters.Group grp : counters) {
-      CounterGroup yGrp = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(CounterGroup.class);
+      CounterGroup yGrp = recordFactory.newRecordInstance(CounterGroup.class);
       yGrp.setName(grp.getName());
       yGrp.setDisplayName(grp.getDisplayName());
       yGrp.addAllCounters(new HashMap<String, Counter>());
       for (org.apache.hadoop.mapred.Counters.Counter cntr : grp) {
-        Counter yCntr = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Counter.class);
+        Counter yCntr = recordFactory.newRecordInstance(Counter.class);
         yCntr.setName(cntr.getName());
         yCntr.setDisplayName(cntr.getDisplayName());
         yCntr.setValue(cntr.getValue());
@@ -234,15 +239,15 @@ public class TypeConverter {
   }
 
   public static Counters toYarn(org.apache.hadoop.mapreduce.Counters counters) {
-    Counters yCntrs = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Counters.class);
+    Counters yCntrs = recordFactory.newRecordInstance(Counters.class);
     yCntrs.addAllCounterGroups(new HashMap<String, CounterGroup>());
     for (org.apache.hadoop.mapreduce.CounterGroup grp : counters) {
-      CounterGroup yGrp = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(CounterGroup.class);
+      CounterGroup yGrp = recordFactory.newRecordInstance(CounterGroup.class);
       yGrp.setName(grp.getName());
       yGrp.setDisplayName(grp.getDisplayName());
       yGrp.addAllCounters(new HashMap<String, Counter>());
       for (org.apache.hadoop.mapreduce.Counter cntr : grp) {
-        Counter yCntr = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Counter.class);
+        Counter yCntr = recordFactory.newRecordInstance(Counter.class);
         yCntr.setName(cntr.getName());
         yCntr.setDisplayName(cntr.getDisplayName());
         yCntr.setValue(cntr.getValue());
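
The TypeConverter hunks above stop resolving the record factory on every conversion and pin a single RecordFactory at class-initialization time; the JVM's class-initialization semantics provide the necessary happens-before, so no extra locking is needed. A sketch of the equivalent direct-initializer form of the same idiom (the holder class is made up for illustration, not part of the patch):

import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

// Equivalent to the static block added to TypeConverter: the factory is
// resolved exactly once, when the class is initialized.
class RecordFactoryHolder {
  static final RecordFactory RECORD_FACTORY =
      RecordFactoryProvider.getRecordFactory(null);
}
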

Modified: hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java?rev=1104635&r1=1104634&r2=1104635&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java (original)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java Wed May 18 00:06:51 2011
@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.factories
 
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
-import java.util.HashMap;
-import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.YarnException;
@@ -34,7 +34,7 @@ public class RecordFactoryPBImpl impleme
 
   private static final RecordFactoryPBImpl self = new RecordFactoryPBImpl();
   private Configuration localConf = new Configuration();
-  private Map<Class<?>, Constructor<?>> cache = new HashMap<Class<?>, Constructor<?>>();
+  private ConcurrentMap<Class<?>, Constructor<?>> cache = new ConcurrentHashMap<Class<?>, Constructor<?>>();
 
   private RecordFactoryPBImpl() {
   }
@@ -43,11 +43,12 @@ public class RecordFactoryPBImpl impleme
     return self;
   }
   
+  @SuppressWarnings("unchecked")
   @Override
   public <T> T newRecordInstance(Class<T> clazz) throws YarnException {
     
-    Constructor<?> constructor = null;
-    if (cache.get(clazz) == null) {
+    Constructor<?> constructor = cache.get(clazz);
+    if (constructor == null) {
       Class<?> pbClazz = null;
       try {
         pbClazz = localConf.getClassByName(getPBImplClassName(clazz));
@@ -56,14 +57,12 @@ public class RecordFactoryPBImpl impleme
             + getPBImplClassName(clazz) + "]", e);
       }
       try {
-        constructor = pbClazz.getConstructor(null);
+        constructor = pbClazz.getConstructor();
         constructor.setAccessible(true);
-        cache.put(clazz, constructor);
+        cache.putIfAbsent(clazz, constructor);
       } catch (NoSuchMethodException e) {
         throw new YarnException("Could not find 0 argument constructor", e);
       }
-    } else {
-      constructor = cache.get(clazz);
     }
     try {
       Object retObject = constructor.newInstance();
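
In RecordFactoryPBImpl above, the check-then-act is now a plain get followed by putIfAbsent on a ConcurrentHashMap. Two racing threads may each resolve the constructor; the second putIfAbsent is a no-op and both proceed with identical Constructor objects, so the race is benign because Class.getConstructor is idempotent. A hedged sketch of a variant that also converges every caller on the cached entry (illustrative class, not part of the patch):

import java.lang.reflect.Constructor;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

class ConstructorCache {
  private final ConcurrentMap<Class<?>, Constructor<?>> cache =
      new ConcurrentHashMap<Class<?>, Constructor<?>>();

  Constructor<?> lookup(Class<?> key, Class<?> impl) throws NoSuchMethodException {
    Constructor<?> c = cache.get(key);
    if (c == null) {
      Constructor<?> created = impl.getConstructor();       // duplicate lookups are harmless
      created.setAccessible(true);
      Constructor<?> prev = cache.putIfAbsent(key, created);
      c = (prev == null) ? created : prev;                  // all callers end up with the winning entry
    }
    return c;
  }
}
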

Modified: hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java?rev=1104635&r1=1104634&r2=1104635&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java (original)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java Wed May 18 00:06:51 2011
@@ -1,10 +1,28 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
 package org.apache.hadoop.yarn.factories.impl.pb;
 
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.net.InetSocketAddress;
-import java.util.HashMap;
-import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.YarnException;
@@ -17,7 +35,7 @@ public class RpcClientFactoryPBImpl impl
   
   private static final RpcClientFactoryPBImpl self = new RpcClientFactoryPBImpl();
   private Configuration localConf = new Configuration();
-  private Map<Class<?>, Constructor<?>> cache = new HashMap<Class<?>, Constructor<?>>();
+  private ConcurrentMap<Class<?>, Constructor<?>> cache = new ConcurrentHashMap<Class<?>, Constructor<?>>();
   
   public static RpcClientFactoryPBImpl get() {
     return RpcClientFactoryPBImpl.self;
@@ -28,8 +46,8 @@ public class RpcClientFactoryPBImpl impl
   
   public Object getClient(Class<?> protocol, long clientVersion, InetSocketAddress addr, Configuration conf) throws YarnException {
    
-    Constructor<?> constructor = null;
-    if (cache.get(protocol) == null) {
+    Constructor<?> constructor = cache.get(protocol);
+    if (constructor == null) {
       Class<?> pbClazz = null;
       try {
         pbClazz = localConf.getClassByName(getPBImplClassName(protocol));
@@ -40,14 +58,11 @@ public class RpcClientFactoryPBImpl impl
       try {
         constructor = pbClazz.getConstructor(Long.TYPE, InetSocketAddress.class, Configuration.class);
         constructor.setAccessible(true);
-        cache.put(protocol, constructor);
+        cache.putIfAbsent(protocol, constructor);
       } catch (NoSuchMethodException e) {
         throw new YarnException("Could not find constructor with params: " + Long.TYPE +
", " + InetSocketAddress.class + ", " + Configuration.class, e);
       }
-    } else {
-      constructor = cache.get(protocol);
     }
-
     try {
       Object retObject = constructor.newInstance(clientVersion, addr, conf);
       return retObject;

Modified: hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java?rev=1104635&r1=1104634&r2=1104635&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java (original)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java Wed May 18 00:06:51 2011
@@ -1,3 +1,21 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
 package org.apache.hadoop.yarn.factories.impl.pb;
 
 import java.io.IOException;
@@ -5,8 +23,8 @@ import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.net.InetSocketAddress;
-import java.util.HashMap;
-import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.RPC;
@@ -29,8 +47,8 @@ public class RpcServerFactoryPBImpl impl
   private static final RpcServerFactoryPBImpl self = new RpcServerFactoryPBImpl();
 
   private Configuration localConf = new Configuration();
-  private Map<Class<?>, Constructor<?>> serviceCache = new HashMap<Class<?>, Constructor<?>>();
-  private Map<Class<?>, Method> protoCache = new HashMap<Class<?>, Method>();
+  private ConcurrentMap<Class<?>, Constructor<?>> serviceCache = new ConcurrentHashMap<Class<?>, Constructor<?>>();
+  private ConcurrentMap<Class<?>, Method> protoCache = new ConcurrentHashMap<Class<?>, Method>();
   
   public static RpcServerFactoryPBImpl get() {
     return RpcServerFactoryPBImpl.self;
@@ -46,8 +64,8 @@ public class RpcServerFactoryPBImpl impl
       SecretManager<? extends TokenIdentifier> secretManager)
       throws YarnException {
     
-    Constructor<?> constructor = null;
-    if (serviceCache.get(protocol) == null) {
+    Constructor<?> constructor = serviceCache.get(protocol);
+    if (constructor == null) {
       Class<?> pbServiceImplClazz = null;
       try {
         pbServiceImplClazz = localConf
@@ -59,14 +77,12 @@ public class RpcServerFactoryPBImpl impl
       try {
         constructor = pbServiceImplClazz.getConstructor(protocol);
         constructor.setAccessible(true);
-        serviceCache.put(protocol, constructor);
+        serviceCache.putIfAbsent(protocol, constructor);
       } catch (NoSuchMethodException e) {
         throw new YarnException("Could not find constructor with params: "
             + Long.TYPE + ", " + InetSocketAddress.class + ", "
             + Configuration.class, e);
       }
-    } else {
-      constructor = serviceCache.get(protocol);
     }
     
     Object service = null;
@@ -80,8 +96,8 @@ public class RpcServerFactoryPBImpl impl
       throw new YarnException(e);
     }
 
-    Method method = null;
-    if (protoCache.get(protocol) == null) {
+    Method method = protoCache.get(protocol);
+    if (method == null) {
       Class<?> protoClazz = null;
       try {
         protoClazz = localConf.getClassByName(getProtoClassName(protocol));
@@ -92,12 +108,10 @@ public class RpcServerFactoryPBImpl impl
       try {
         method = protoClazz.getMethod("newReflectiveBlockingService", service.getClass().getInterfaces()[0]);
         method.setAccessible(true);
-        protoCache.put(protocol, method);
+        protoCache.putIfAbsent(protocol, method);
       } catch (NoSuchMethodException e) {
         throw new YarnException(e);
       }
-    } else {
-      method = protoCache.get(protocol);
     }
     
     try {

Modified: hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/YarnRemoteExceptionFactoryPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/YarnRemoteExceptionFactoryPBImpl.java?rev=1104635&r1=1104634&r2=1104635&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/YarnRemoteExceptionFactoryPBImpl.java (original)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/YarnRemoteExceptionFactoryPBImpl.java Wed May 18 00:06:51 2011
@@ -1,3 +1,21 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
 package org.apache.hadoop.yarn.factories.impl.pb;
 
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;

Modified: hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java?rev=1104635&r1=1104634&r2=1104635&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java (original)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java Wed May 18 00:06:51 2011
@@ -15,6 +15,12 @@ public class RecordFactoryProvider {
   
   public static String RECORD_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.record.factory.class";
   
+  private static Configuration defaultConf;
+  
+  static {
+    defaultConf = new Configuration();
+  }
+  
   private RecordFactoryProvider() {
   }
   
@@ -22,7 +28,7 @@ public class RecordFactoryProvider {
     if (conf == null) {
       //Assuming the default configuration has the correct factories set.
       //Users can specify a particular factory by providing a configuration.
-      conf = new Configuration();
+      conf = defaultConf;
     }
     String recordFactoryClassName = conf.get(RECORD_FACTORY_CLASS_KEY);
     if (recordFactoryClassName == null) {
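
The RecordFactoryProvider hunk above avoids constructing a fresh Configuration on every getRecordFactory(null) call by reusing one default instance built during class initialization; callers that pass their own Configuration can still select a specific factory via the key defined in this class. A hedged usage sketch (the factory class name set below is an assumption for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class RecordFactoryProviderSketch {
  public static void main(String[] args) {
    // null now falls back to the shared, statically built default Configuration.
    RecordFactory defaultFactory = RecordFactoryProvider.getRecordFactory(null);

    // An explicit Configuration still wins; the key comes from the patch,
    // the value below is illustrative.
    Configuration conf = new Configuration();
    conf.set(RecordFactoryProvider.RECORD_FACTORY_CLASS_KEY,
        "org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl");
    RecordFactory pbFactory = RecordFactoryProvider.getRecordFactory(conf);
  }
}
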


