flink-issues mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From "mingleizhang (JIRA)" <j...@apache.org>
Subject [jira] [Updated] (FLINK-7118) Remove hadoop1.x code in HadoopUtils
Date Fri, 07 Jul 2017 01:20:00 GMT

     [ https://issues.apache.org/jira/browse/FLINK-7118?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]

mingleizhang updated FLINK-7118:
--------------------------------
    Description: 
Since Flink no longer supports Hadoop 1.x, we should remove this code. 

	
{code:java}
/**
 * Creates a Hadoop {@code JobContext} via reflection, bridging the API difference
 * between Hadoop versions: in Hadoop 2.x {@code JobContext} is an interface whose
 * implementation lives in {@code org.apache.hadoop.mapreduce.task.JobContextImpl},
 * while in Hadoop 1.x it is a concrete class.
 *
 * @param configuration the Hadoop configuration passed to the context constructor
 * @param jobId the job id passed to the context constructor
 * @return the reflectively instantiated {@code JobContext}
 * @throws Exception if the context class cannot be located or instantiated;
 *         the underlying reflective failure is attached as the cause
 */
public static JobContext instantiateJobContext(Configuration configuration, JobID jobId) throws Exception {
	try {
		final Class<?> clazz;
		// Hadoop 2.x: JobContext is an interface; instantiate the impl class.
		if (JobContext.class.isInterface()) {
			clazz = Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl", true, Thread.currentThread().getContextClassLoader());
		}
		// Hadoop 1.x: JobContext itself is a concrete class.
		else {
			clazz = Class.forName("org.apache.hadoop.mapreduce.JobContext", true, Thread.currentThread().getContextClassLoader());
		}
		Constructor<?> constructor = clazz.getConstructor(Configuration.class, JobID.class);
		return (JobContext) constructor.newInstance(configuration, jobId);
	} catch (Exception e) {
		// Chain the original exception instead of swallowing it, so the real
		// reflective failure (missing class, missing constructor, ...) is visible.
		throw new Exception("Could not create instance of JobContext.", e);
	}
}
{code}

And 


{code:java}
	/**
	 * Creates a Hadoop {@code TaskAttemptContext} via reflection, bridging the API
	 * difference between Hadoop versions: in Hadoop 2.x {@code TaskAttemptContext}
	 * is an interface implemented by
	 * {@code org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl}, while in
	 * Hadoop 1.x it is a concrete class.
	 *
	 * @param configuration the Hadoop configuration passed to the context constructor
	 * @param taskAttemptID the task attempt id passed to the context constructor
	 * @return the reflectively instantiated {@code TaskAttemptContext}
	 * @throws Exception if the context class cannot be located or instantiated;
	 *         the underlying reflective failure is attached as the cause
	 */
	public static TaskAttemptContext instantiateTaskAttemptContext(Configuration configuration, TaskAttemptID taskAttemptID) throws Exception {
		try {
			final Class<?> clazz;
			// Hadoop 2.x: TaskAttemptContext is an interface; instantiate the impl class.
			// Use the context classloader explicitly, consistent with instantiateJobContext.
			if (JobContext.class.isInterface()) {
				clazz = Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl", true, Thread.currentThread().getContextClassLoader());
			}
			// Hadoop 1.x: TaskAttemptContext itself is a concrete class.
			else {
				clazz = Class.forName("org.apache.hadoop.mapreduce.TaskAttemptContext", true, Thread.currentThread().getContextClassLoader());
			}
			Constructor<?> constructor = clazz.getConstructor(Configuration.class, TaskAttemptID.class);
			return (TaskAttemptContext) constructor.newInstance(configuration, taskAttemptID);
		} catch (Exception e) {
			// Chain the original exception instead of swallowing it, so the real
			// reflective failure (missing class, missing constructor, ...) is visible.
			throw new Exception("Could not create instance of TaskAttemptContext.", e);
		}
	}
{code}


  was:
Since Flink no longer supports Hadoop 1.x, we should remove it. 

	
{code:java}
public static JobContext instantiateJobContext(Configuration configuration, JobID jobId) throws
Exception {
		try {
			Class<?> clazz;
			// for Hadoop 1.xx
			if(JobContext.class.isInterface()) {
				clazz = Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl", true, Thread.currentThread().getContextClassLoader());
			}
			// for Hadoop 2.xx
			else {
				clazz = Class.forName("org.apache.hadoop.mapreduce.JobContext", true, Thread.currentThread().getContextClassLoader());
			}
			Constructor<?> constructor = clazz.getConstructor(Configuration.class, JobID.class);
			JobContext context = (JobContext) constructor.newInstance(configuration, jobId);
			
			return context;
		} catch(Exception e) {
			throw new Exception("Could not create instance of JobContext.");
		}
	}
{code}

And 


{code:java}
	public static TaskAttemptContext instantiateTaskAttemptContext(Configuration configuration,
 TaskAttemptID taskAttemptID) throws Exception {
		try {
			Class<?> clazz;
			// for Hadoop 1.xx
			if(JobContext.class.isInterface()) {
				clazz = Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
			}
			// for Hadoop 2.xx
			else {
				clazz = Class.forName("org.apache.hadoop.mapreduce.TaskAttemptContext");
			}
			Constructor<?> constructor = clazz.getConstructor(Configuration.class, TaskAttemptID.class);
			TaskAttemptContext context = (TaskAttemptContext) constructor.newInstance(configuration,
taskAttemptID);
			
			return context;
		} catch(Exception e) {
			throw new Exception("Could not create instance of TaskAttemptContext.");
		}
	}
{code}



> Remove hadoop1.x code in HadoopUtils
> ------------------------------------
>
>                 Key: FLINK-7118
>                 URL: https://issues.apache.org/jira/browse/FLINK-7118
>             Project: Flink
>          Issue Type: Bug
>          Components: Java API
>            Reporter: mingleizhang
>            Assignee: mingleizhang
>
> Since flink no longer support hadoop 1.x version, we should remove it. 
> 	
> {code:java}
> public static JobContext instantiateJobContext(Configuration configuration, JobID jobId)
throws Exception {
> 		try {
> 			Class<?> clazz;
> 			// for Hadoop 1.xx
> 			if(JobContext.class.isInterface()) {
> 				clazz = Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl", true, Thread.currentThread().getContextClassLoader());
> 			}
> 			// for Hadoop 2.xx
> 			else {
> 				clazz = Class.forName("org.apache.hadoop.mapreduce.JobContext", true, Thread.currentThread().getContextClassLoader());
> 			}
> 			Constructor<?> constructor = clazz.getConstructor(Configuration.class, JobID.class);
> 			JobContext context = (JobContext) constructor.newInstance(configuration, jobId);
> 			
> 			return context;
> 		} catch(Exception e) {
> 			throw new Exception("Could not create instance of JobContext.");
> 		}
> 	}
> {code}
> And 
> {code:java}
> 	public static TaskAttemptContext instantiateTaskAttemptContext(Configuration configuration,
 TaskAttemptID taskAttemptID) throws Exception {
> 		try {
> 			Class<?> clazz;
> 			// for Hadoop 1.xx
> 			if(JobContext.class.isInterface()) {
> 				clazz = Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
> 			}
> 			// for Hadoop 2.xx
> 			else {
> 				clazz = Class.forName("org.apache.hadoop.mapreduce.TaskAttemptContext");
> 			}
> 			Constructor<?> constructor = clazz.getConstructor(Configuration.class, TaskAttemptID.class);
> 			TaskAttemptContext context = (TaskAttemptContext) constructor.newInstance(configuration,
taskAttemptID);
> 			
> 			return context;
> 		} catch(Exception e) {
> 			throw new Exception("Could not create instance of TaskAttemptContext.");
> 		}
> 	}
> {code}



--
This message was sent by Atlassian JIRA
(v6.4.14#64029)

Mime
View raw message