package eu.dnetlib.data.mapreduce.hbase;

import java.io.IOException;
import java.util.Map.Entry;
import java.util.Properties;

import javax.annotation.Resource;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.springframework.beans.factory.BeanNameAware;

import eu.dnetlib.data.hadoop.config.ClusterName;
import eu.dnetlib.data.hadoop.config.ConfigurationEnumerator;
import eu.dnetlib.data.mapreduce.HadoopJob;

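/**
 * Base class for the HBase mapreduce job definitions. Subclasses contribute the job
 * specific settings (mapper, reducer, input/output formats) by implementing
 * {@link #setJobDetails(Job, Properties)}, while this class creates the {@link Job}
 * from the configuration registered for the target cluster and merges the submission
 * properties into it. The Spring bean name doubles as the job name.
 * <p>
 * A minimal sketch of a concrete subclass; the class names and the property key below
 * are illustrative only, not part of this module:
 *
 * <pre>{@code
 * public class MyWordCountJob extends AbstractHBaseMapReduceJob {
 *
 *     @Override
 *     protected Job setJobDetails(final Job job, final Properties p) throws Exception {
 *         // clear the output dir so repeated submissions don't fail (hypothetical property key)
 *         deleteHdfsFile(job, new Path(p.getProperty("mapred.output.dir")));
 *         job.setJarByClass(MyWordCountJob.class);
 *         job.setMapperClass(MyMapper.class);
 *         job.setReducerClass(MyReducer.class);
 *         return job;
 *     }
 * }
 * }</pre>
 */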
public abstract class AbstractHBaseMapReduceJob extends Configured implements HadoopJob, BeanNameAware {

	protected static final Log log = LogFactory.getLog(AbstractHBaseMapReduceJob.class);

	protected String jobName;

	@Resource
	protected ConfigurationEnumerator configurationEnumerator;

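	/**
	 * Completes the job definition with the subclass specific settings, e.g. mapper,
	 * reducer, input/output formats.
	 *
	 * @param job the job pre-configured for the target cluster
	 * @param p   the job submission properties
	 * @return the fully configured job
	 * @throws Exception when the job cannot be configured
	 */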
	protected abstract Job setJobDetails(Job job, Properties p) throws Exception;

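	/**
	 * Creates a job for the given cluster and delegates the job specific settings to
	 * {@link #setJobDetails(Job, Properties)}. Checked exceptions are wrapped in a
	 * {@link RuntimeException}, as the interface method does not declare them.
	 */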
	@Override
	public Job setJobDetails(ClusterName name, Properties p) {
		try {
			final Job job = createJobCommon(name, p);
			return setJobDetails(job, p);
		} catch (Exception e) {
			throw new RuntimeException("unable to define Job: " + getClass().getSimpleName(), e);
		}
	}

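	/**
	 * Instantiates a {@link Job} from the configuration registered for the given cluster
	 * and merges the submission properties into it.
	 */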
	protected Job createJobCommon(ClusterName name, Properties p) throws IOException {

		// Job.getInstance replaces the Job(Configuration, String) constructor, deprecated in Hadoop 2.x
		final Job job = Job.getInstance(configurationEnumerator.get(name), getClass().getSimpleName());

		merge(job, p);

		return job;
	}

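	/**
	 * Copies every entry of the given properties into the job configuration, overriding
	 * existing values with the same key. Both keys and values are expected to be Strings.
	 */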
	protected void merge(final Job job, final Properties p) {
		for (Entry<Object, Object> e : p.entrySet()) {
			job.getConfiguration().set((String) e.getKey(), (String) e.getValue());
		}
	}

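	/**
	 * Recursively deletes the given path from the filesystem configured for the job,
	 * if it exists. Typically used to clean up the output directory of a previous run.
	 */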
	protected void deleteHdfsFile(Job job, Path path) {
		try {
			FileSystem hdfs = FileSystem.get(job.getConfiguration());
			if (hdfs.exists(path)) {
				hdfs.delete(path, true);
			}
		} catch (IOException e) {
			// report through the class logger rather than printStackTrace, so the failure reaches the job logs
			log.error("unable to delete hdfs path: " + path, e);
		}
	}

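	/**
	 * Invoked by the Spring container; the bean name doubles as the job name.
	 */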
	@Override
	public void setBeanName(String name) {
		this.jobName = name;
	}

	@Override
	public String getName() {
		return jobName;
	}

}
