package eu.dnetlib.data.actionmanager.blackboard;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
import javax.annotation.Resource;

import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import eu.dnetlib.data.hadoop.HadoopIsClient;
import eu.dnetlib.enabling.locators.UniqueServiceLocator;
import eu.dnetlib.enabling.tools.blackboard.BlackboardClientHandler;
import eu.dnetlib.enabling.tools.blackboard.BlackboardJob;
import eu.dnetlib.enabling.tools.blackboard.BlackboardJobRegistry;
import eu.dnetlib.rmi.data.hadoop.ClusterName;
import eu.dnetlib.rmi.data.hadoop.HadoopBlackboardActions;
import eu.dnetlib.rmi.data.hadoop.HadoopService;
import eu.dnetlib.rmi.data.hadoop.HadoopServiceException;
import eu.dnetlib.rmi.data.hadoop.actionmanager.ActionManagerException;
import eu.dnetlib.rmi.data.hadoop.actionmanager.ActionManagerSet;
import eu.dnetlib.rmi.enabling.ISLookUpException;
import eu.dnetlib.rmi.enabling.ISLookUpService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.beans.factory.annotation.Value;

public class ActionManagerJobLauncher {

	public static final String ALL_SETS = "__ALL__";
	public static final String SEMICOLON = ";";
	public static final String COLON = ",";
	public static final String SEQFILE_INPUTFORMAT = "org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat";
	private final static String UPDATE_ACTION_PROFILE = "for $x in "
			+ "/RESOURCE_PROFILE[.//RESOURCE_TYPE/@value = 'ActionManagerSetDSResourceType' and .//SET/@id = '%s'] "
			+ "return update delete $x//RAW_SETS/EXPIRED[@id = '%s']";
	private static final Log log = LogFactory.getLog(ActionManagerJobLauncher.class); // NOPMD by marko on 11/24/08 5:02 PM
	private final Executor executor = Executors.newCachedThreadPool();

	@Resource
	private UniqueServiceLocator serviceLocator;

	private HadoopIsClient isClient;

	/**
	 * blackboard handler.
	 */
	@Resource
	private BlackboardClientHandler blackboardClientHandler;

	/**
	 * blackboard job registry.
	 */
	@Resource
	private BlackboardJobRegistry jobRegistry;

	@Value("${services.actionmanager.promote.mapper.class}")
	private String promoteJobMapperClass;

	public void executePromoteFromHDFSJob(final Set<String> sets, final String targetTable, final ActionManagerBlackboardCallback callback) throws ActionManagerException, ISLookUpException {
		log.info("Starting commit m/r job; sets=" + sets);

		final List<String> paths = Lists.newArrayList();
		if (sets == null || sets.isEmpty()) {
			paths.addAll(isClient.listSets().stream().map(ActionManagerSet::getPathToLatest).collect(Collectors.toList()));

		} else {
			for (String setId : sets) {
				if (isClient.existsSet(setId)) {
					final ActionManagerSet set = isClient.getSet(setId);
					paths.add(set.getPathToLatest());
				} else {
					log.error("Invalid set " + setId);
					throw new ActionManagerException("Invalid set " + setId);
				}
			}
		}

		final HadoopService hadoopService = serviceLocator.getService(HadoopService.class);
		final List<String> existingPaths = Lists.newArrayList(Iterables.filter(paths, path -> {

			try {
				final boolean exist = hadoopService.existHdfsPath(ClusterName.DM.toString(), path);
				if (!exist) {
					log.warn(String.format("path '%s' doesn't exist on DM cluster'", path));
				}
				return exist;
			} catch (HadoopServiceException e) {
				log.error(e);
				return false;
			}
		}));

		final Map<String, String> params = Maps.newHashMap();
		params.put("mapred.input.dir.formats", Joiner.on(COLON).join(Iterables.transform(existingPaths, path -> path + SEMICOLON + SEQFILE_INPUTFORMAT)));
		params.put("mapred.input.dir.mappers", Joiner.on(COLON).join(Iterables.transform(existingPaths, path -> path + SEMICOLON + promoteJobMapperClass)));

		params.put("hbase.mapred.outputtable", targetTable);
		params.put("hbase.mapreduce.outputtable", targetTable);

		log.info("promoting HDFS rawsets: " + paths);

		executeHDFS("promoteMultipleActionSetsJob", params, callback);
	}

	private void executeHDFS(final String jobName, final Map<String, String> params, final ActionManagerBlackboardCallback callback) throws ActionManagerException {

		if (params == null || params.isEmpty()) { throw new ActionManagerException("Missing HDFS paths"); }

		executor.execute(() -> {
			try {
				final String serviceId = findHadoopServiceProfileID();
				final BlackboardJob bbJob = blackboardClientHandler.newJob(serviceId);

				bbJob.setAction(HadoopBlackboardActions.SUBMIT_MAPREDUCE_JOB.toString());
				bbJob.getParameters().put("job.name", jobName);
				bbJob.getParameters().put("cluster", ClusterName.DM.toString());
				bbJob.getParameters().putAll(params);

				jobRegistry.registerJobListener(bbJob, new ActionManagerBlackboardJobListener(callback));
				blackboardClientHandler.assign(bbJob);
			} catch (Exception e) {
				log.error("Error launching m/r job: " + jobName, e);
				throw new RuntimeException("Error launching m/r job: " + jobName, e);
			}
		});
	}

	private String findHadoopServiceProfileID() throws Exception {
		return serviceLocator.getService(ISLookUpService.class).getResourceProfileByQuery(
				"collection('/db/DRIVER/ServiceResources/HadoopServiceResourceType')//RESOURCE_IDENTIFIER/@value/string()");
	}

	public HadoopIsClient getIsClient() {
		return isClient;
	}

	@Required
	public void setIsClient(final HadoopIsClient isClient) {
		this.isClient = isClient;
	}

}
