package eu.dnetlib.data.mapreduce.hbase;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.Text;

import eu.dnetlib.data.mapreduce.hbase.index.PrepareFeedJob;
import eu.dnetlib.data.proto.TypeProtos.Type;

/**
 * Decodes an HBase row key of the form {@code <type><SEPARATOR><id>}, where
 * {@code <type>} is the integer code of a {@link Type} and the separator is the
 * first byte of {@link PrepareFeedJob#bSEPARATOR}.
 *
 * <p>Malformed keys (missing separator, empty or non-numeric type prefix) do not
 * raise: the decoder falls back to {@code type = -1} / {@code id = null}, which
 * callers detect via {@link #isValid()}.
 */
public class OafRowDecoder {

	// Numeric entity type parsed from the key prefix; -1 when the key is malformed.
	private final int type;

	// Entity identifier parsed from the key suffix; null when the key is malformed.
	private final String id;

	/**
	 * Decodes the given row key.
	 *
	 * @param key the raw row key
	 * @return a decoder holding the parsed type and id (check {@link #isValid()})
	 */
	public static OafRowDecoder decode(final Text key) {
		return new OafRowDecoder(key);
	}

	/**
	 * Decodes the given row key.
	 *
	 * @param key the raw row key bytes
	 * @return a decoder holding the parsed type and id (check {@link #isValid()})
	 */
	public static OafRowDecoder decode(final ImmutableBytesWritable key) {
		return new OafRowDecoder(new Text(key.copyBytes()));
	}

	private OafRowDecoder(final Text key) {

		final String rowKey = key.toString();
		final int pos = rowKey.indexOf((char) PrepareFeedJob.bSEPARATOR[0]);

		int parsedType = -1;
		String parsedId = null;

		// pos <= 0 means separator missing (-1) or empty type prefix (0): both are
		// malformed keys and keep the fallback values, matching the previous
		// behavior where substring/parseInt failed and the catch reset the fields.
		if (pos > 0) {
			try {
				parsedType = Integer.parseInt(rowKey.substring(0, pos));
				parsedId = rowKey.substring(pos + 1);
			} catch (NumberFormatException e) {
				// Non-numeric type prefix: treat as invalid rather than failing the job.
				parsedType = -1;
				parsedId = null;
			}
		}

		this.type = parsedType;
		this.id = parsedId;
	}

	/**
	 * @return true when the key carried both a positive type code and an id
	 */
	public boolean isValid() {
		return this.id != null && this.type > 0;
	}

	/**
	 * @return true when the decoded type code maps to a known {@link Type}
	 */
	public boolean hasType() {
		return getType() != null;
	}

	/**
	 * @return the decoded {@link Type}, or null when the key was malformed
	 */
	public Type getType() {
		if (type > 0) {
			return Type.valueOf(type);
		} else {
			return null;
		}
	}

	/**
	 * @return the decoded identifier, or null when the key was malformed
	 */
	public String getId() {
		return id;
	}
}
