/**
 *
 */
package gr.uoa.di.webui.search;

import eu.dnetlib.api.data.PublisherService;
import eu.dnetlib.api.data.PublisherServiceException;
import eu.dnetlib.api.data.SearchService;
import eu.dnetlib.api.data.SearchServiceException;
import eu.dnetlib.domain.EPR;
import eu.dnetlib.domain.data.BrowseData;
import eu.dnetlib.domain.enabling.Vocabulary;
import eu.dnetlib.domain.functionality.LayoutField;
import eu.dnetlib.domain.functionality.Searchable;
import gr.uoa.di.driver.data.browsedata.BrowseDataUtil;
import gr.uoa.di.driver.enabling.resultset.ResultSet;
import gr.uoa.di.driver.enabling.resultset.ResultSetFactory;
import gr.uoa.di.driver.util.ServiceLocator;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;

import javax.xml.ws.soap.SOAPFaultException;

import org.apache.log4j.Logger;

/**
 * Manages all searches in the the web interface. All elements in query forms
 * are passed to the SearchManager and page results are retuned in a format that
 * is straightforward for jsp pages to render.
 * 
 * @author kiatrop
 */
public class SearchManager {

	// public static final int DEFAULT_PAGE_SIZE = 10;
	/** Default number of MRU query result sets kept in {@link #documentQueries}. */
	public static final int DEFAULT_CACHE_SIZE = 100;

	public static Logger logger = Logger.getLogger(SearchManager.class);

	private CriteriaManager criteriaManager = null;
	private BrowseDataReader browseDataReader = null;
	// Lazily built in getReader() from the index layout; reused for all documents.
	private DocumentReader reader = null;
	// MRU cache of query -> open ResultSet (see QueryCache in this file).
	private QueryCache documentQueries = null;
	private int cacheSize = 0;
	// private int pageSize = 0;

	private ServiceLocator<SearchService> searchServiceLocator = null;
	private ServiceLocator<PublisherService> publisherServiceLocator = null;

	private QueryEnhancer enhancer = null;

	public BrowseDataReader getBrowseDataReader() {
		return browseDataReader;
	}

	public void setBrowseDataReader(BrowseDataReader browseDataReader) {
		this.browseDataReader = browseDataReader;
	}

	public ServiceLocator<PublisherService> getPublisherServiceLocator() {
		return publisherServiceLocator;
	}

	public void setPublisherServiceLocator(
			ServiceLocator<PublisherService> publisherServiceLocator) {
		this.publisherServiceLocator = publisherServiceLocator;
	}

	public ServiceLocator<SearchService> getSearchServiceLocator() {
		return searchServiceLocator;
	}

	public void setSearchServiceLocator(
			ServiceLocator<SearchService> searchServiceLocator) {
		this.searchServiceLocator = searchServiceLocator;
	}

	public QueryEnhancer getEnhancer() {
		return enhancer;
	}

	public void setEnhancer(QueryEnhancer enhancer) {
		this.enhancer = enhancer;
	}

	public SearchManager() {
		this.setCacheSize(DEFAULT_CACHE_SIZE); // initializes cache
		// this.setPageSize(DEFAULT_PAGE_SIZE);
	}

	/**
	 * Use an internal cache of the {@link #cacheSize} MRU query results. If the
	 * query result is invalidated (result set update fails) the query is
	 * resubmitted. The returned page points to the specified <code>page</code>
	 * assuming <code>pageSize</code> documents per page.
	 * <p>
	 * NOTE: Cache only search query results -- never cache result set data!
	 * 
	 * @param query
	 *            The query to execute.
	 * @param pageSize
	 *            The number of documents in the page.
	 * @param pageNumber
	 *            The number of the page to return. First page is 1.
	 * @return The requested page.
	 * @throws SearchServiceException
	 *             if the search service cannot be reached or the result set
	 *             fails twice in a row.
	 */
	public DocumentPage search(String query, int pageSize, int pageNumber)
			throws SearchServiceException {

		query = enhancer.enhanceQuery(query);
		logger.debug("performing search with query = " + query);

		try {
			// lookup query in query cache; submit to the search service on a miss
			ResultSet<String> rs = documentQueries.get(query);
			if (rs == null) {
				EPR epr = getSearchServiceLocator().getService().search(query);
				rs = ResultSetFactory.createResultSet(epr);
				if (logger.isDebugEnabled()) {
					logger.debug("EPR : " + epr.getEpr());
				}
				documentQueries.put(query, rs);
			}

			// first attempt: the cached result set may have expired on the
			// service side, in which case the query is re-submitted below
			DocumentPage page = null;
			try {
				page = new DocumentPage(rs, getReader(), pageSize, pageNumber);

			} catch (InvalidatedResultSet irs) {
				rs = reperformSearch(query);

			} catch (SOAPFaultException sfe) {
				rs = reperformSearch(query);
			}

			// second attempt to load page data from new RS -- a second
			// invalidation is treated as a hard failure
			if (page == null) {
				try {
					page = new DocumentPage(rs, getReader(), pageSize,
							pageNumber);

				} catch (InvalidatedResultSet irs) {
					throw new SearchServiceException(
							"Error reading data from ResultSet.", irs);
				}
			}

			return page;

		} catch (RuntimeException re) {
			throw new SearchServiceException(
					"Error connecting to search service.", re);
		}
	}

	/**
	 * Re-submits <code>query</code> to the search service, replacing any stale
	 * cached result set for it.
	 *
	 * @param query the query to re-execute
	 * @return a fresh result set, also stored in the cache
	 * @throws SearchServiceException propagated from the search service
	 */
	private ResultSet<String> reperformSearch(String query)
			throws SearchServiceException {
		logger.debug("re perform search for query " + query);

		documentQueries.remove(query); // force remove for lru
		EPR epr = getSearchServiceLocator().getService().search(query);
		ResultSet<String> rs = ResultSetFactory.createResultSet(epr);
		documentQueries.put(query, rs);
		return rs;
	}

	/**
	 * Runs a refine (faceting) query and parses each returned record into
	 * (field, value, count) browse entries.
	 *
	 * @param query  the search query to refine
	 * @param fields the index fields to refine on
	 * @return browse data post-processed by the configured BrowseDataReader
	 * @throws SearchServiceException propagated from the search service
	 */
	public BrowseData refine(String query, List<String> fields)
			throws SearchServiceException {
		logger.info("running refine query: " + query + " for fields: "
				+ fields);

		EPR epr = searchServiceLocator.getService().refine(query, fields);
		ResultSet<String> rs = ResultSetFactory.createResultSet(epr);

		if (logger.isDebugEnabled()) {
			logger.debug("EPR : " + epr.getEpr());
		}

		BrowseData data = new BrowseData();

		// consume the whole result set; each record encodes one browse entry
		for (String record : rs.getElements(1, rs.size())) {
			if (record == null || record.trim().equals("")) {
				continue; // skip blank records rather than failing the parse
			}

			int count = BrowseDataUtil.parseCount(record);
			String field = BrowseDataUtil.parseField(record);
			String value = BrowseDataUtil.parseValue(record);

			data.addFieldEntry(field, value, count);
		}

		return this.browseDataReader.read(data, false);
	}

	/**
	 * Runs a browse query for a single field.
	 *
	 * @param prefix       the browse prefix
	 * @param field        the field to browse
	 * @param alphabetical whether results should be ordered alphabetically
	 * @return the post-processed browse data
	 * @throws SearchServiceException propagated from the search service
	 */
	public BrowseData browse(String prefix, String field, boolean alphabetical)
			throws SearchServiceException {
		logger.info("running browse query for field " + field);

		BrowseData data = searchServiceLocator.getService().browse(prefix,
				field);
		// FIX: read once and return the same instance -- the previous version
		// called browseDataReader.read(...) a second time for the return value,
		// doing the post-processing work twice.
		BrowseData bd = this.browseDataReader.read(data, alphabetical);

		logger.info("browse data " + bd.getFields().toString());
		return bd;
	}

	/**
	 * Lazily builds the shared {@link DocumentReader} from the index layout.
	 * Adds the hardcoded header fields (id, dateCollected) that are not part
	 * of the configured layout, and wires each searchable to its vocabulary.
	 *
	 * @return the (cached) document reader
	 * @throws SearchServiceException propagated from the layout manager
	 */
	protected DocumentReader getReader() throws SearchServiceException {
		if (reader == null) {
			LayoutManager lm = criteriaManager.getLayoutManager();
			// use all index fields
			List<LayoutField> fields = lm.getIndexLayoutManager()
					.getResultFields();

			// map index field names to searchable names
			Map<String, String> nameMap = new HashMap<String, String>();
			for (LayoutField field : fields) {
				String name = field.getName();
				nameMap.put(name, lm.getNameFromIndexType(name));
			}

			// hardcoded header searchables -- not part of index layout
			LayoutField idField = new LayoutField();
			idField.setIndexable(false);
			idField.setName("id");
			idField.setResult(true);
			idField.setStat(false);
			idField.setTokenizable(false);
			idField.setType("objIdentifier");
			idField.setXpath("//dri:objIdentifier");
			fields.add(idField);
			LayoutField dateField = new LayoutField();
			dateField.setIndexable(false);
			dateField.setName("dateCollected");
			dateField.setResult(true);
			dateField.setStat(false);
			dateField.setTokenizable(false);
			dateField.setType("dateOfCollection");
			dateField.setXpath("//dri:dateOfCollection");
			fields.add(dateField);

			// TODO: use index field as key -- not web layout name
			HashMap<String, Vocabulary> vocabularyMap = new HashMap<String, Vocabulary>();
			for (Searchable s : lm.getWebLayoutManager().getSearchFields()) {
				String vocabulary = s.getVocabulary();
				// FIX: the previous check tested the loop variable (s != null),
				// which is never null here; the intent is to skip searchables
				// without a vocabulary so the map gets no null-keyed lookups.
				if (vocabulary != null) {
					vocabularyMap.put(s.getName(), criteriaManager
							.getVocabularyMap().get(vocabulary));
				}
			}

			reader = new DocumentReader(fields, nameMap, vocabularyMap);
		}
		return reader;
	}

	/**
	 * Fetches a single document by id from the publisher service.
	 *
	 * @param id the document identifier
	 * @return the parsed document
	 * @throws SearchServiceException wrapping any retrieval or parse failure
	 */
	public Document retrieveDocument(String id) throws SearchServiceException {
		try {
			PublisherService publisher = getPublisherServiceLocator()
					.getService();
			return getReader().read(publisher.getResourceById(id, "DMF"));

		} catch (Throwable t) {
			throw new SearchServiceException(
					"Cannot retrieve document with id: " + id, t);
		}
	}

	/**
	 * Fetches the given documents one by one from the publisher service.
	 * Fails fast: the first retrieval error aborts the whole batch.
	 *
	 * @param ids the document identifiers
	 * @return the parsed documents, in the order of <code>ids</code>
	 * @throws SearchServiceException wrapping the first retrieval failure
	 */
	public List<Document> retrieveDocuments(List<String> ids)
			throws SearchServiceException {

		List<Document> documents = new ArrayList<Document>();

		PublisherService publisher = getPublisherServiceLocator().getService();

		for (String id : ids) {
			try {
				String xmlDoc = publisher.getResourceById(id, "DMF");

				logger.debug("xml: " + xmlDoc);

				documents.add(getReader().read(xmlDoc));

			} catch (Throwable t) {
				throw new SearchServiceException(
						"Cannot retrieve document with id: " + id, t);
			}
		}

		return documents;
	}

	/**
	 * Fetches a page of documents from the publisher service, caching the
	 * underlying result set under <code>id</code> so subsequent pages reuse it.
	 *
	 * @param id   cache key for the document result set
	 * @param ids  the document identifiers to fetch
	 * @param page page number to return (first page is 1)
	 * @param size number of documents per page
	 * @return the requested page of documents
	 * @throws PublisherServiceException propagated from the publisher service
	 * @throws SearchServiceException    if the result set fails twice in a row
	 */
	public DocumentPage retrieveDocuments(String id, List<String> ids,
			int page, int size) throws PublisherServiceException, SearchServiceException {

		PublisherService publisher = getPublisherServiceLocator().getService();

		ResultSet<String> rs = documentQueries.get(id);
		if (rs == null) {
			EPR epr = publisher.getResourcesByIds(ids, "DMF");
			rs = ResultSetFactory.createResultSet(epr);
			if (logger.isDebugEnabled()) {
				logger.debug("EPR : " + epr.getEpr());
			}
			documentQueries.put(id, rs);
		}

		DocumentPage documentPage = null;
		try {
			documentPage = new DocumentPage(rs, getReader(), size, page);

		} catch (InvalidatedResultSet e) {
			// NOTE(review): this re-runs `id` as a *search query*, not a new
			// getResourcesByIds call -- presumably `id` doubles as the original
			// query string; verify against callers.
			rs = reperformSearch(id);
		}

		// second attempt to load page data from new RS
		if (documentPage == null) {
			try {
				documentPage = new DocumentPage(rs, getReader(), size,
						page);

			} catch (InvalidatedResultSet irs) {
				throw new SearchServiceException(
						"Error reading data from ResultSet.", irs);
			}
		}

		return documentPage;
	}

	/**
	 * Creates a result set client directly from an EPR string.
	 *
	 * @param epr the endpoint reference as a string
	 * @return a result set client for that EPR
	 * @throws SearchServiceException wrapping any client creation failure
	 */
	protected ResultSet<String> getRSClient(String epr)
			throws SearchServiceException {
		ResultSet<String> rs = null;

		try {
			rs = ResultSetFactory.createResultSet(new EPR(epr));

		} catch (Throwable t) {
			throw new SearchServiceException(
					"Cannot create result set client.", t);
		}

		return rs;
	}

	public int getCacheSize() {
		return cacheSize;
	}

	/**
	 * Resizes the query cache, migrating any existing cached result sets into
	 * the new cache (entries beyond the new capacity are evicted and closed).
	 *
	 * @param cacheSize the new maximum number of cached queries
	 */
	public void setCacheSize(int cacheSize) {
		this.cacheSize = cacheSize;
		QueryCache cache = new QueryCache(cacheSize);
		if (this.documentQueries != null) {
			cache.putAll(this.documentQueries);
		}
		this.documentQueries = cache;
	}

	/*
	 * public int getPageSize() { return pageSize; }
	 * 
	 * public void setPageSize(int pageSize) { this.pageSize = pageSize; }
	 */
	public CriteriaManager getCriteriaManager() {
		return criteriaManager;
	}

	public void setCriteriaManager(CriteriaManager criteriaManager) {
		this.criteriaManager = criteriaManager;
	}
}

/**
 * An LRU cache of open result sets, keyed by query string. The parent
 * {@link LinkedHashMap} is created in access order so the eldest entry is the
 * least-recently-used one. Every result set that leaves the cache -- whether
 * evicted, replaced, or removed -- is closed to release its server-side
 * resources.
 */
class QueryCache extends LinkedHashMap<String, ResultSet<String>> {

	private static final long serialVersionUID = -2602457779996743778L;

	// Maximum number of entries retained before LRU eviction kicks in.
	private int cacheSize = 0;

	/**
	 * Creates the parent linked hash map with access order enabled.
	 *
	 * @param cacheSize maximum number of cached result sets
	 */
	public QueryCache(int cacheSize) {
		super(cacheSize, 0.7f, true);
		this.cacheSize = cacheSize;
	}

	/**
	 * Evicts (and closes) the least-recently-used entry once capacity is
	 * exceeded. Called by the superclass after each insertion.
	 */
	@Override
	protected boolean removeEldestEntry(Entry<String, ResultSet<String>> eldest) {
		boolean evict = size() > cacheSize;
		if (evict) {
			eldest.getValue().close();
		}
		return evict;
	}

	/** Stores a result set, closing any previous one cached under the key. */
	@Override
	public ResultSet<String> put(String key, ResultSet<String> value) {
		return closeIfPresent(super.put(key, value));
	}

	/** Removes and closes the result set cached under the key, if any. */
	@Override
	public ResultSet<String> remove(Object key) {
		return closeIfPresent(super.remove(key));
	}

	// Closes a displaced result set before handing it back to the caller.
	private ResultSet<String> closeIfPresent(ResultSet<String> displaced) {
		if (displaced != null) {
			displaced.close();
		}
		return displaced;
	}
}
