package eu.dnetlib.data.hadoop.blackboard;

import java.io.IOException;
import java.io.StringWriter;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
import org.springframework.beans.factory.annotation.Autowired;

import eu.dnetlib.data.hadoop.HadoopJob;
import eu.dnetlib.data.hadoop.mapreduce.MapreduceJobMonitor;
import eu.dnetlib.data.hadoop.utils.JobProfile;
import eu.dnetlib.data.hadoop.utils.ScanFactory;
import eu.dnetlib.data.hadoop.utils.ScanProperties;
import eu.dnetlib.enabling.tools.blackboard.BlackboardJob;
import eu.dnetlib.miscutils.functional.xml.XMLIndenter;
import eu.dnetlib.rmi.data.hadoop.ClusterName;
import eu.dnetlib.rmi.data.hadoop.HadoopServiceException;

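/**
 * Blackboard action that submits a mapreduce job, described by a {@link JobProfile}, to the cluster
 * indicated in the blackboard parameters, and registers it with the job registry for monitoring.
 */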
public class SubmitMapreduceJobAction extends AbstractSubmitAction {

	/**
	 * logger.
	 */
	private static final Log log = LogFactory.getLog(SubmitMapreduceJobAction.class); // NOPMD by marko on 11/24/08 5:02 PM

	@Autowired
	private ScanFactory scanFactory;

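	/**
	 * Builds the {@link JobConf} from the job profile and the blackboard parameters, submits it to the
	 * jobtracker of the target cluster, and registers the running job so that the given callback is
	 * invoked upon completion.
	 *
	 * @throws HadoopServiceException
	 *             when mapreduce is not available on the target cluster or the submission fails
	 */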
	@Override
	public void submit(final JobCompletion callback, final BlackboardJob bbJob, final String jobName, final JobProfile jobProfile)
			throws HadoopServiceException {

		final ClusterName clusterName = ClusterName.valueOf(bbJob.getParameters().get("cluster"));

		try {
			// fail fast: don't build the JobConf when the cluster has no mapreduce client
			if (!hadoopClientMap.isMapreduceAvailable(clusterName)) {
				throw new HadoopServiceException("mapreduce not available for cluster: " + clusterName);
			}

			final JobConf jobConf = prepareJob(getConf(clusterName), jobName, jobProfile, bbJob.getParameters());

			logJobDetails(jobConf);

			final RunningJob runningJob = hadoopClientMap.getJtClient(clusterName).submitJob(jobConf);
			final String jobId = newJobId(clusterName, runningJob.getID().getId());

			jobRegistry.registerJob(HadoopJob.newInstance(jobId, clusterName, jobProfile,
					new MapreduceJobMonitor(runningJob, callback)));

		} catch (final IOException e) {
			throw new HadoopServiceException("error submitting hadoop job: " + jobName, e);
		}
	}

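	/**
	 * Builds the {@link JobConf} for a job: sets the job name, description and job jar, then applies
	 * the properties from the job profile definition and the blackboard parameters.
	 */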
	protected JobConf prepareJob(final Configuration configuration, final String jobName, final JobProfile jobProfile, final Map<String, String> parameters)
			throws IOException, HadoopServiceException {

		log.info("creating job: " + jobName);

		final JobConf jobConf = new JobConf(configuration);
		jobConf.setJobName(jobName);
		jobConf.set("dnet.mapred.job.description", jobProfile.getDescription());

		final String jobLib = getJobLib(configuration, jobProfile);
		jobConf.setJar(new Path(jobLib).toString());

		set(jobConf, jobProfile.getJobDefinition());
		set(jobConf, parameters);

		// when reading from an HBase table, the job also needs a serialized Scan definition.
		final ScanProperties scanProperties = jobProfile.getScanProperties();
		if (jobProfile.getRequiredParams().contains("hbase.mapreduce.inputtable") && (scanProperties != null)) {
			jobConf.set("hbase.mapreduce.scan", scanFactory.getScan(scanProperties));
		}

		return jobConf;
	}

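	/**
	 * Resolves the HDFS path of the job jar: a "job.lib" property in the job profile, when present,
	 * overrides the default lib path; relative paths are resolved against the cluster's default filesystem.
	 */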
	protected String getJobLib(final Configuration configuration, final JobProfile jobProfile) throws HadoopServiceException {
		String jobLib = getDefaultLibPath(configuration.get("fs.defaultFS"));

		if (jobProfile.getJobDefinition().containsKey("job.lib")) {
			jobLib = jobProfile.getJobDefinition().get("job.lib");
		}

		if ((jobLib == null) || jobLib.isEmpty()) throw new HadoopServiceException("job.lib must refer to an absolute or relative HDFS path");
		if (!jobLib.startsWith("hdfs://")) {
			jobLib = configuration.get("fs.defaultFS") + jobLib;
		}

		log.info("using job.lib: " + jobLib);
		return jobLib;
	}

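	/**
	 * Copies the given properties into the JobConf, resolving relative HDFS paths against the
	 * cluster's default filesystem.
	 */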
	protected void set(final JobConf jobConf, final Map<String, String> properties) {
		for (final Entry<String, String> e : properties.entrySet()) {
			String value = e.getValue();
			if (checkHdfsProperty(e)) {
				// prepend the default filesystem URI, using a local variable to avoid
				// mutating the caller's map as a side effect
				value = jobConf.get("fs.defaultFS") + value;
			}
			jobConf.set(e.getKey(), value);
		}
	}

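	/**
	 * Logs the full job configuration as indented XML, at debug level.
	 */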
	protected void logJobDetails(final JobConf jobConf) {
		final StringWriter sw = new StringWriter();
		try {
			jobConf.writeXml(sw);
			if (log.isDebugEnabled()) {
				log.debug("\n" + XMLIndenter.indent(sw.toString()));
			}
		} catch (final IOException e) {
			log.warn("unable to log job details: " + jobConf.getJobName(), e);
		}
	}

}
