Incorporate a prior model for the stochastic linear ranker

Expose the algorithm parameters as string key/value pairs
Add a resetRanker method
New load and get model methods
Automatic cross-validation of the prior/user mixing factor (AutoCrossValidation)
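
For reference, a minimal client-side usage sketch of the new interface.
The "my_ranker" name, the feature keys and the parameter values below are
placeholders, and the (Context, name) constructor is assumed from the
existing BordeauxRanker code; the parameter keys mirror the string
constants in jni_stochastic_linear_ranker.h and
StochasticLinearRankerWithPrior.java:

    import android.bordeaux.services.BordeauxRanker;
    import android.content.Context;
    import java.util.HashMap;

    float scoreWithPrior(Context context) {
        // Placeholder ranker name and feature keys.
        BordeauxRanker ranker = new BordeauxRanker(context, "my_ranker");
        ranker.reset();                                     // drop any previously learned weights
        ranker.setParameter("UpdateType", "REG_CS");        // learner parameters as string pairs
        ranker.setParameter("usePriorInformation", "true"); // blend user scores with a prior model
        ranker.setParameter("setAlpha", "0.5");             // fixed mixing factor alpha

        HashMap<String, Float> prior = new HashMap<String, Float>();
        prior.put("feature_a", 2.0f);
        prior.put("feature_b", -1.0f);
        ranker.setPriorWeight(prior);                       // weights of the prior model

        HashMap<String, Float> sample = new HashMap<String, Float>();
        sample.put("feature_a", 1.0f);
        sample.put("feature_b", 0.5f);
        return ranker.scoreSample(sample);                  // (1 - alpha) * user + alpha * prior
    }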

Change-Id: I6466711ea37912debcfc09da46fcde0dfbd88ee5
diff --git a/bordeaux/learning/stochastic_linear_ranker/java/android/bordeaux/learning/StochasticLinearRanker.java b/bordeaux/learning/stochastic_linear_ranker/java/android/bordeaux/learning/StochasticLinearRanker.java
index 12b7b29..35d3ec1 100644
--- a/bordeaux/learning/stochastic_linear_ranker/java/android/bordeaux/learning/StochasticLinearRanker.java
+++ b/bordeaux/learning/stochastic_linear_ranker/java/android/bordeaux/learning/StochasticLinearRanker.java
@@ -23,6 +23,8 @@
 import java.util.List;
 import java.util.Arrays;
 import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  * Stochastic Linear Ranker, learns how to rank a sample. The learned rank score
@@ -35,19 +37,29 @@
  */
 public class StochasticLinearRanker {
     String TAG = "StochasticLinearRanker";
-
+    public static int VAR_NUM = 14;
     static public class Model implements Serializable {
-        public ArrayList<String> keys = new ArrayList<String>();
-        public ArrayList<Float> values = new ArrayList<Float>();
-        public ArrayList<Float> parameters = new ArrayList<Float>();
+        public HashMap<String, Float> weights = new HashMap<String, Float>();
+        public float weightNormalizer = 1;
+        public HashMap<String, String> parameters = new HashMap<String, String>();
     }
 
-    static int VAR_NUM = 15;
+    /**
+     * Initializes the ranker.
+     */
     public StochasticLinearRanker() {
         mNativeClassifier = initNativeClassifier();
     }
 
     /**
+     * Reset the ranker
+     */
+    public void resetRanker(){
+        deleteNativeClassifier(mNativeClassifier);
+        mNativeClassifier = initNativeClassifier();
+    }
+
+    /**
      * Train the ranker with a pair of samples. A sample,  a pair of arrays of
      * keys and values. The first sample should have higher rank than the second
      * one.
@@ -71,38 +83,71 @@
     /**
      * Get the current model and parameters of ranker
      */
-    public Model getModel(){
-        Model model = new Model();
+    public Model getUModel(){
+        Model slrModel = new Model();
         int len = nativeGetLengthClassifier(mNativeClassifier);
-        String[] keys = new String[len];
-        float[] values = new float[len];
-        float[] param = new float[VAR_NUM];
-        nativeGetClassifier(keys, values, param, mNativeClassifier);
-        boolean add_flag;
-        for (int  i=0; i< keys.length ; i++){
-            add_flag = model.keys.add(keys[i]);
-            add_flag = model.values.add(values[i]);
-        }
-        for (int  i=0; i< param.length ; i++)
-            add_flag = model.parameters.add(param[i]);
-        return model;
+        String[] wKeys = new String[len];
+        float[] wValues = new float[len];
+        float wNormalizer = 1;
+        nativeGetWeightClassifier(wKeys, wValues, wNormalizer, mNativeClassifier);
+        slrModel.weightNormalizer = wNormalizer;
+        for (int  i=0; i< wKeys.length ; i++)
+            slrModel.weights.put(wKeys[i], wValues[i]);
+
+        String[] paramKeys = new String[VAR_NUM];
+        String[] paramValues = new String[VAR_NUM];
+        nativeGetParameterClassifier(paramKeys, paramValues, mNativeClassifier);
+        for (int  i=0; i< paramKeys.length ; i++)
+            slrModel.parameters.put(paramKeys[i], paramValues[i]);
+        return slrModel;
     }
 
     /**
-     * use the given model and parameters for ranker
+     * Load the given model and parameters into the ranker
      */
     public boolean loadModel(Model model) {
-        float[] values = new float[model.values.size()];
-        float[] param = new float[model.parameters.size()];
-        for (int i = 0; i < model.values.size(); ++i) {
-            values[i]  = model.values.get(i);
+        String[] wKeys = new String[model.weights.size()];
+        float[] wValues = new float[model.weights.size()];
+        int i = 0;
+        for (Map.Entry<String, Float> e : model.weights.entrySet()){
+            wKeys[i] = e.getKey();
+            wValues[i] = e.getValue();
+            i++;
         }
-        for (int i = 0; i < model.parameters.size(); ++i) {
-            param[i]  = model.parameters.get(i);
+        boolean res = setModelWeights(wKeys, wValues, model.weightNormalizer);
+        if (!res)
+            return false;
+
+        for (Map.Entry<String, String> e : model.parameters.entrySet()){
+            res = setModelParameter(e.getKey(), e.getValue());
+            if (!res)
+                return false;
         }
-        String[] keys = new String[model.keys.size()];
-        model.keys.toArray(keys);
-        return nativeLoadClassifier(keys, values, param, mNativeClassifier);
+        return res;
+    }
+
+    public boolean setModelWeights(String[] keys, float [] values, float normalizer){
+        return nativeSetWeightClassifier(keys, values, normalizer, mNativeClassifier);
+    }
+
+    public boolean setModelParameter(String key, String value){
+        boolean res = nativeSetParameterClassifier(key, value, mNativeClassifier);
+        return res;
+    }
+
+    /**
+     * Print a model for debugging
+     */
+    public void print(Model model){
+        String Sw = "";
+        String Sp = "";
+        for (Map.Entry<String, Float> e : model.weights.entrySet())
+            Sw = Sw + "<" + e.getKey() + "," + e.getValue() + "> ";
+        for (Map.Entry<String, String> e : model.parameters.entrySet())
+            Sp = Sp + "<" + e.getKey() + "," + e.getValue() + "> ";
+        Log.i(TAG, "Weights are " + Sw);
+        Log.i(TAG, "Normalizer is " + model.weightNormalizer);
+        Log.i(TAG, "Parameters are " + Sp);
     }
 
     @Override
@@ -130,12 +175,19 @@
             float[] values_negative,
             int classifierPtr);
 
-    private native float nativeScoreSample(String[] keys,
-                                           float[] values,
-                                           int classifierPtr);
-    private native void nativeGetClassifier(String [] keys, float[] values, float[] param,
-                                             int classifierPtr);
-    private native boolean nativeLoadClassifier(String [] keys, float[] values,
-                                                 float[] param, int classifierPtr);
+    private native float nativeScoreSample(String[] keys, float[] values, int classifierPtr);
+
+    private native void nativeGetWeightClassifier(String [] keys, float[] values, float normalizer,
+                                                  int classifierPtr);
+
+    private native void nativeGetParameterClassifier(String [] keys, String[] values,
+                                                  int classifierPtr);
+
     private native int nativeGetLengthClassifier(int classifierPtr);
+
+    private native boolean nativeSetWeightClassifier(String [] keys, float[] values,
+                                                     float normalizer, int classifierPtr);
+
+    private native boolean nativeSetParameterClassifier(String key, String value,
+                                                        int classifierPtr);
 }
diff --git a/bordeaux/learning/stochastic_linear_ranker/jni/jni_stochastic_linear_ranker.cpp b/bordeaux/learning/stochastic_linear_ranker/jni/jni_stochastic_linear_ranker.cpp
index 6984a66..211f727 100644
--- a/bordeaux/learning/stochastic_linear_ranker/jni/jni_stochastic_linear_ranker.cpp
+++ b/bordeaux/learning/stochastic_linear_ranker/jni/jni_stochastic_linear_ranker.cpp
@@ -29,6 +29,7 @@
 
 void CreateSparseWeightVector(JNIEnv* env, const jobjectArray keys, const float* values,
     const int length, SparseWeightVector<string> * sample) {
+
   for (int i = 0; i < length; ++i) {
     jboolean iscopy;
     jstring s = (jstring) env->GetObjectArrayElement(keys, i);
@@ -38,6 +39,15 @@
   }
 }
 
+void ConvertParameter2Object(JNIEnv* env, jobjectArray *keys, jobjectArray *values,
+    const char * name , const char * paramValue, int index) {
+
+    jstring jstrK = env->NewStringUTF(name);
+    jstring jstrV = env->NewStringUTF(paramValue);
+    env->SetObjectArrayElement(*keys, index, jstrK);
+    env->SetObjectArrayElement(*values, index, jstrV);
+}
+
 void DecomposeSparseWeightVector(JNIEnv* env, jobjectArray *keys, jfloatArray *values,
     const int length, SparseWeightVector<string> *sample) {
 
@@ -46,9 +56,9 @@
   for ( SparseWeightVector<string>::Witer_const iter = w_.begin();
     iter != w_.end(); ++iter) {
     std::string key = iter->first;
+    float value = (float) iter->second;
     jstring jstr = env->NewStringUTF(key.c_str());
     env->SetObjectArrayElement(*keys, i, jstr);
-    double value = iter->second;
     jfloat s[1];
     s[0] = value;
     env->SetFloatArrayRegion(*values, i, 1, s);
@@ -56,115 +66,166 @@
   }
 }
 
-jboolean Java_android_bordeaux_learning_StochasticLinearRanker_nativeLoadClassifier(
+jboolean Java_android_bordeaux_learning_StochasticLinearRanker_nativeSetWeightClassifier(
     JNIEnv* env,
     jobject thiz,
     jobjectArray key_array_model,
     jfloatArray value_array_model,
-    jfloatArray value_array_param,
+    jfloat normalizer_model,
     jint paPtr) {
 
   StochasticLinearRanker<string>* classifier = (StochasticLinearRanker<string>*) paPtr;
-  if (classifier && key_array_model && value_array_model && value_array_param) {
+  if (classifier && key_array_model && value_array_model && normalizer_model) {
     const int keys_m_len = env->GetArrayLength(key_array_model);
     jfloat* values_m = env->GetFloatArrayElements(value_array_model, NULL);
     const int values_m_len = env->GetArrayLength(value_array_model);
-    jfloat* param_m = env->GetFloatArrayElements(value_array_param, NULL);
 
     if (values_m && key_array_model && values_m_len == keys_m_len) {
       SparseWeightVector<string> model;
       CreateSparseWeightVector(env, key_array_model, values_m, values_m_len, &model);
-      model.SetNormalizer((double) param_m[0]);
+      model.SetNormalizer(normalizer_model);
       classifier->LoadWeights(model);
-      classifier->SetIterationNumber((uint64) param_m[1]);
-      classifier->SetNormConstraint((double) param_m[2]);
-
-      switch ((int) param_m[3]){
-      case 0 :
-        classifier->SetRegularizationType(learning_stochastic_linear::L0);
-        break;
-      case 1 :
-        classifier->SetRegularizationType(learning_stochastic_linear::L1);
-        break;
-      case 2 :
-        classifier->SetRegularizationType(learning_stochastic_linear::L2);
-        break;
-      case 3 :
-        classifier->SetRegularizationType(learning_stochastic_linear::L1L2);
-        break;
-      case 4 :
-        classifier->SetRegularizationType(learning_stochastic_linear::L1LInf);
-        break;
-      }
-
-      classifier->SetLambda((double) param_m[4]);
-
-      switch ((int) param_m[5]){
-      case 0 :
-        classifier->SetUpdateType(learning_stochastic_linear::FULL_CS);
-        break;
-      case 1 :
-        classifier->SetUpdateType(learning_stochastic_linear::CLIP_CS);
-        break;
-      case 2 :
-        classifier->SetUpdateType(learning_stochastic_linear::REG_CS);
-        break;
-      case 3 :
-        classifier->SetUpdateType(learning_stochastic_linear::SL);
-        break;
-      case 4 :
-        classifier->SetUpdateType(learning_stochastic_linear::ADAPTIVE_REG);
-        break;
-      }
-
-      switch ((int) param_m[6]){
-      case 0 :
-        classifier->SetAdaptationMode(learning_stochastic_linear::CONST);
-        break;
-      case 1 :
-        classifier->SetAdaptationMode(learning_stochastic_linear::INV_LINEAR);
-        break;
-      case 2 :
-        classifier->SetAdaptationMode(learning_stochastic_linear::INV_QUADRATIC);
-        break;
-      case 3 :
-        classifier->SetAdaptationMode(learning_stochastic_linear::INV_SQRT);
-        break;
-      }
-
-      switch ((int) param_m[7]){
-      case 0 :
-        classifier->SetKernelType(learning_stochastic_linear::LINEAR, (double) param_m[8],
-                                  (double) param_m[9],(double) param_m[10]);
-        break;
-      case 1 : classifier->SetKernelType(learning_stochastic_linear::POLY, (double) param_m[8],
-                                         (double) param_m[9],(double) param_m[10]);
-        break;
-      case 2 : classifier->SetKernelType(learning_stochastic_linear::RBF, (double) param_m[8],
-                                          (double) param_m[9],(double) param_m[10]);
-        break;
-      }
-
-      switch ((int) param_m[11]){
-      case 0 :
-        classifier->SetRankLossType(learning_stochastic_linear::PAIRWISE);
-        break;
-      case 1 :
-        classifier->SetRankLossType(learning_stochastic_linear::RECIPROCAL_RANK);
-        break;
-      }
-
-      classifier->SetAcceptanceProbability((double) param_m[12]);
-      classifier->SetMiniBatchSize((uint64)param_m[13]);
-      classifier->SetGradientL0Norm((int32)param_m[14]);
       env->ReleaseFloatArrayElements(value_array_model, values_m, JNI_ABORT);
-      env->ReleaseFloatArrayElements(value_array_param, param_m, JNI_ABORT);
       return JNI_TRUE;
     }
   }
   return JNI_FALSE;
 }
 
+jboolean Java_android_bordeaux_learning_StochasticLinearRanker_nativeSetParameterClassifier(
+    JNIEnv* env,
+    jobject thiz,
+    jstring key,
+    jstring value,
+    jint paPtr) {
+
+  StochasticLinearRanker<string>* classifier = (StochasticLinearRanker<string>*) paPtr;
+  jboolean iscopy;
+  const char *cKey = env->GetStringUTFChars(key, &iscopy);
+  const char *cValue = env->GetStringUTFChars(value, &iscopy);
+  float v;
+  if (strcmp(cKey, ITR_NUM) == 0){
+    sscanf(cValue, "%f", &v);
+    classifier->SetIterationNumber((uint64) v);
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, NORM_CONSTRAINT) == 0){
+    sscanf(cValue, "%f", &v);
+    classifier->SetNormConstraint((double) v);
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, REG_TYPE) == 0){
+    if (strcmp(cValue, REG_TYPE_L0 ) == 0)
+      classifier->SetRegularizationType(learning_stochastic_linear::L0);
+    else if (strcmp(cValue, REG_TYPE_L1 ) == 0)
+      classifier->SetRegularizationType(learning_stochastic_linear::L1);
+    else if (strcmp(cValue, REG_TYPE_L2 ) == 0)
+      classifier->SetRegularizationType(learning_stochastic_linear::L2);
+    else if (strcmp(cValue, REG_TYPE_L1L2 ) == 0)
+      classifier->SetRegularizationType(learning_stochastic_linear::L1L2);
+    else if (strcmp(cValue, REG_TYPE_L1LInf ) == 0)
+      classifier->SetRegularizationType(learning_stochastic_linear::L1LInf);
+    else {
+      ALOGE("Error: %s is not a Regularization Type", cValue);
+      return JNI_FALSE;
+    }
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, LAMBDA) == 0){
+    sscanf(cValue, "%f", &v);
+    classifier->SetLambda((double) v);
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, UPDATE_TYPE) == 0){
+    if (strcmp(cValue, UPDATE_TYPE_FULL_CS) == 0)
+      classifier->SetUpdateType(learning_stochastic_linear::FULL_CS);
+    else if (strcmp(cValue, UPDATE_TYPE_CLIP_CS) == 0)
+      classifier->SetUpdateType(learning_stochastic_linear::CLIP_CS);
+    else if (strcmp(cValue, UPDATE_TYPE_REG_CS ) == 0)
+      classifier->SetUpdateType(learning_stochastic_linear::REG_CS);
+    else if (strcmp(cValue, UPDATE_TYPE_SL) == 0)
+      classifier->SetUpdateType(learning_stochastic_linear::SL);
+    else if (strcmp(cValue, UPDATE_TYPE_ADAPTIVE_REG) == 0)
+      classifier->SetUpdateType(learning_stochastic_linear::ADAPTIVE_REG);
+    else {
+      ALOGE("Error: %s is not an Update Type", cValue);
+      return JNI_FALSE;
+    }
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, ADAPT_MODE) == 0){
+    if (strcmp(cValue, ADAPT_MODE_CONST ) == 0)
+      classifier->SetAdaptationMode(learning_stochastic_linear::CONST);
+    else if (strcmp(cValue, ADAPT_MODE_INV_LINEAR ) == 0)
+      classifier->SetAdaptationMode(learning_stochastic_linear::INV_LINEAR);
+    else if (strcmp(cValue, ADAPT_MODE_INV_QUADRATIC ) == 0)
+      classifier->SetAdaptationMode(learning_stochastic_linear::INV_QUADRATIC);
+    else if (strcmp(cValue, ADAPT_MODE_INV_SQRT ) == 0)
+      classifier->SetAdaptationMode(learning_stochastic_linear::INV_SQRT);
+    else {
+      ALOGE("Error: %s is not an Adaptation Mode", cValue);
+      return JNI_FALSE;
+    }
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, KERNEL_TYPE) == 0){
+    if (strcmp(cValue, KERNEL_TYPE_LINEAR ) == 0)
+      classifier->SetKernelType(learning_stochastic_linear::LINEAR);
+    else if (strcmp(cValue, KERNEL_TYPE_POLY ) == 0)
+      classifier->SetKernelType(learning_stochastic_linear::POLY);
+    else if (strcmp(cValue, KERNEL_TYPE_RBF ) == 0)
+      classifier->SetKernelType(learning_stochastic_linear::RBF);
+    else {
+      ALOGE("Error: %s is not a Kernel Type", cValue);
+      return JNI_FALSE;
+    }
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, KERNEL_PARAM) == 0){
+    sscanf(cValue, "%f", &v);
+    classifier->SetKernelParam((double) v);
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, KERNEL_GAIN) == 0){
+    sscanf(cValue, "%f", &v);
+    classifier->SetKernelGain((double) v);
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, KERNEL_BIAS) == 0){
+    sscanf(cValue, "%f", &v);
+    classifier->SetKernelBias((double) v);
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, LOSS_TYPE) == 0){
+    if (strcmp(cValue, LOSS_TYPE_PAIRWISE ) == 0)
+      classifier->SetRankLossType(learning_stochastic_linear::PAIRWISE);
+    else if (strcmp(cValue, LOSS_TYPE_RECIPROCAL_RANK ) == 0)
+      classifier->SetRankLossType(learning_stochastic_linear::RECIPROCAL_RANK);
+    else {
+      ALOGE("Error: %s is not a Kernel Type", cValue);
+      return JNI_FALSE;
+    }
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, ACC_PROB) == 0){
+    sscanf(cValue, "%f", &v);
+    classifier->SetAcceptanceProbability((double) v);
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, MIN_BATCH_SIZE) == 0){
+    sscanf(cValue, "%f", &v);
+    classifier->SetMiniBatchSize((uint64) v);
+    return JNI_TRUE;
+  }
+  else if (strcmp(cKey, GRAD_L0_NORM) == 0){
+    sscanf(cValue, "%f", &v);
+    classifier->SetGradientL0Norm((int32) v);
+    return JNI_TRUE;
+  }
+  ALOGE("Error: %s is not a ranker parameter", cKey);
+  return JNI_FALSE;
+}
+
 jint Java_android_bordeaux_learning_StochasticLinearRanker_nativeGetLengthClassifier(
   JNIEnv* env,
   jobject thiz,
@@ -179,65 +240,155 @@
   return len;
 }
 
-void Java_android_bordeaux_learning_StochasticLinearRanker_nativeGetClassifier(
+std::string ConvertFloat2String(float v){
+    std::stringstream converter;
+    converter << v;
+    return converter.str();
+}
+
+void Java_android_bordeaux_learning_StochasticLinearRanker_nativeGetParameterClassifier(
+    JNIEnv* env,
+    jobject thiz,
+    jobjectArray key_array_param,
+    jobjectArray value_array_param,
+    jint paPtr){
+
+  std::string s;
+  StochasticLinearRanker<string>* classifier = (StochasticLinearRanker<string>*) paPtr;
+  s = ConvertFloat2String((float) classifier->GetIterationNumber());
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, ITR_NUM, s.c_str(), 0 );
+
+  s = ConvertFloat2String((float) classifier->GetNormContraint());
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, NORM_CONSTRAINT, s.c_str(), 1 );
+
+  float value = (float) classifier->GetRegularizationType();
+  switch ((int) value) {
+    case learning_stochastic_linear::L0 :
+      s = REG_TYPE_L0;
+      break;
+    case learning_stochastic_linear::L1 :
+      s = REG_TYPE_L1;
+      break;
+    case learning_stochastic_linear::L2 :
+      s = REG_TYPE_L2;
+      break;
+    case learning_stochastic_linear::L1L2 :
+      s = REG_TYPE_L1L2;
+      break;
+    case learning_stochastic_linear::L1LInf :
+      s = REG_TYPE_L1LInf;
+      break;
+  }
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, REG_TYPE, s.c_str(), 2 );
+
+  s = ConvertFloat2String((float) classifier->GetLambda());
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, LAMBDA, s.c_str(), 3 );
+
+  value = (float) classifier->GetUpdateType();
+  switch ((int) value) {
+    case learning_stochastic_linear::FULL_CS :
+      s = UPDATE_TYPE_FULL_CS;
+      break;
+    case learning_stochastic_linear::CLIP_CS :
+      s = UPDATE_TYPE_CLIP_CS;
+      break;
+    case learning_stochastic_linear::REG_CS :
+      s = UPDATE_TYPE_REG_CS;
+      break;
+    case learning_stochastic_linear::SL :
+      s = UPDATE_TYPE_SL;
+      break;
+    case learning_stochastic_linear::ADAPTIVE_REG :
+      s = UPDATE_TYPE_ADAPTIVE_REG;
+      break;
+  }
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, UPDATE_TYPE, s.c_str(), 4 );
+
+  value = (float) classifier->GetAdaptationMode();
+  switch ((int) value) {
+    case learning_stochastic_linear::CONST :
+      s = ADAPT_MODE_CONST;
+      break;
+    case learning_stochastic_linear::INV_LINEAR :
+      s = ADAPT_MODE_INV_LINEAR;
+      break;
+    case learning_stochastic_linear::INV_QUADRATIC :
+      s = ADAPT_MODE_INV_QUADRATIC;
+      break;
+    case learning_stochastic_linear::INV_SQRT :
+      s = ADAPT_MODE_INV_SQRT;
+      break;
+  }
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, ADAPT_MODE, s.c_str(), 5 );
+
+  value = (float) classifier->GetKernelType();
+  switch ((int) value) {
+    case learning_stochastic_linear::LINEAR :
+      s = KERNEL_TYPE_LINEAR;
+      break;
+    case learning_stochastic_linear::POLY :
+      s = KERNEL_TYPE_POLY;
+      break;
+    case learning_stochastic_linear::RBF :
+      s = KERNEL_TYPE_RBF;
+      break;
+  }
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, KERNEL_TYPE, s.c_str(), 6 );
+
+  s = ConvertFloat2String((float) classifier->GetKernelParam());
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, KERNEL_PARAM, s.c_str(), 7 );
+
+  s = ConvertFloat2String((float) classifier->GetKernelGain());
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, KERNEL_GAIN, s.c_str(), 8 );
+
+  s = ConvertFloat2String((float)classifier->GetKernelBias());
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, KERNEL_BIAS, s.c_str(), 9 );
+
+  value = (float) classifier->GetRankLossType();
+  switch ((int) value) {
+    case learning_stochastic_linear::PAIRWISE :
+      s = LOSS_TYPE_PAIRWISE;
+      break;
+    case learning_stochastic_linear::RECIPROCAL_RANK :
+      s = LOSS_TYPE_RECIPROCAL_RANK;
+      break;
+  }
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, LOSS_TYPE, s.c_str(), 10 );
+
+  s = ConvertFloat2String((float) classifier->GetAcceptanceProbability());
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, ACC_PROB, s.c_str(), 11 );
+
+  s = ConvertFloat2String((float) classifier->GetMiniBatchSize());
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, MIN_BATCH_SIZE, s.c_str(), 12 );
+
+  s = ConvertFloat2String((float) classifier->GetGradientL0Norm());
+  ConvertParameter2Object(env, &key_array_param, &value_array_param, GRAD_L0_NORM, s.c_str(), 13 );
+}
+
+void Java_android_bordeaux_learning_StochasticLinearRanker_nativeGetWeightClassifier(
   JNIEnv* env,
   jobject thiz,
   jobjectArray key_array_model,
   jfloatArray value_array_model,
-  jfloatArray value_array_param,
+  jfloat normalizer,
   jint paPtr) {
 
   StochasticLinearRanker<string>* classifier = (StochasticLinearRanker<string>*) paPtr;
-
   SparseWeightVector<string> M_weights;
   classifier->SaveWeights(&M_weights);
-  double Jni_weight_normalizer = M_weights.GetNormalizer();
-  int Jni_itr_num = classifier->GetIterationNumber();
-  double Jni_norm_cont = classifier->GetNormContraint();
-  int Jni_reg_type = classifier->GetRegularizationType();
-  double Jni_lambda = classifier->GetLambda();
-  int Jni_update_type = classifier->GetUpdateType();
-  int Jni_AdaptationMode = classifier->GetAdaptationMode();
-  double Jni_kernel_param, Jni_kernel_gain, Jni_kernel_bias;
-  int Jni_kernel_type = classifier->GetKernelType(&Jni_kernel_param, &Jni_kernel_gain, &Jni_kernel_bias);
-  int Jni_rank_loss_type = classifier->GetRankLossType();
-  double Jni_accp_prob = classifier->GetAcceptanceProbability();
-  uint64 Jni_min_batch_size = classifier->GetMiniBatchSize();
-  int32 Jni_GradL0Norm = classifier->GetGradientL0Norm();
-  const int Var_num = 15;
-  jfloat s[Var_num]= {  (float) Jni_weight_normalizer,
-                        (float) Jni_itr_num,
-                        (float) Jni_norm_cont,
-                        (float) Jni_reg_type,
-                        (float) Jni_lambda,
-                        (float) Jni_update_type,
-                        (float) Jni_AdaptationMode,
-                        (float) Jni_kernel_type,
-                        (float) Jni_kernel_param,
-                        (float) Jni_kernel_gain,
-                        (float) Jni_kernel_bias,
-                        (float) Jni_rank_loss_type,
-                        (float) Jni_accp_prob,
-                        (float) Jni_min_batch_size,
-                        (float) Jni_GradL0Norm};
-
-  env->SetFloatArrayRegion(value_array_param, 0, Var_num, s);
-
   SparseWeightVector<string>::Wmap w_map = M_weights.GetMap();
   int array_len = w_map.size();
 
+  normalizer = M_weights.GetNormalizer();
   DecomposeSparseWeightVector(env, &key_array_model, &value_array_model, array_len, &M_weights);
 }
 
 jint Java_android_bordeaux_learning_StochasticLinearRanker_initNativeClassifier(JNIEnv* env,
                              jobject thiz) {
   StochasticLinearRanker<string>* classifier = new StochasticLinearRanker<string>();
-  classifier->SetUpdateType(learning_stochastic_linear::REG_CS);
-  classifier->SetRegularizationType(learning_stochastic_linear::L2);
   return ((jint) classifier);
 }
 
-
 jboolean Java_android_bordeaux_learning_StochasticLinearRanker_deleteNativeClassifier(JNIEnv* env,
                                jobject thiz,
                                jint paPtr) {
@@ -285,7 +436,6 @@
   return JNI_FALSE;
 }
 
-
 jfloat Java_android_bordeaux_learning_StochasticLinearRanker_nativeScoreSample(
   JNIEnv* env,
   jobject thiz,
diff --git a/bordeaux/learning/stochastic_linear_ranker/jni/jni_stochastic_linear_ranker.h b/bordeaux/learning/stochastic_linear_ranker/jni/jni_stochastic_linear_ranker.h
index 3b9c6d6..0f20c90 100644
--- a/bordeaux/learning/stochastic_linear_ranker/jni/jni_stochastic_linear_ranker.h
+++ b/bordeaux/learning/stochastic_linear_ranker/jni/jni_stochastic_linear_ranker.h
@@ -23,6 +23,88 @@
 extern "C" {
 #endif
 
+/*  Counts the number of learning iterations. */
+const char * ITR_NUM = "IterationNumber";
+
+/*  The maximum norm of the weight vector. If the norm of the weights is larger than
+    NormConstraint, they are reprojected using RegularizationType to satisfy this constraint. */
+const char * NORM_CONSTRAINT = "NormConstraint";
+
+/*  Determines the type of regularization used in learning.
+    The regularization can be based on different norms.
+    Options: "L0", "L1", "L2", "L1L2", "L1LInf".
+    Default : "L2" */
+const char * REG_TYPE = "RegularizationType";
+
+/*  Lambda is a factor that the learning step size is multiplied by; it can be used
+    to scale the step size.
+    Default : 1.0 */
+const char * LAMBDA = "Lambda";
+
+/*  This parameter determines the update type in learning process.
+    Options: "FULL_CS" , "CLIP_CS", "REG_CS", "SL", "ADAPTIVE_REG"
+    Default : "SL" */
+const char * UPDATE_TYPE = "UpdateType";
+
+/*  Options: "CONST", "INV_LINEAR", "INV_QUADRATIC", "INV_SQRT"
+    Default: "INV_LINEAR". */
+const char * ADAPT_MODE = "AdaptationMode";
+
+/*  Three different kernels are supported: linear "LINEAR", polynomial "POLY", and RBF "RBF".
+    Default : "LINEAR" */
+const char * KERNEL_TYPE = "KernelType";
+
+/*  The kernel parameter is kernel-specific. For a polynomial kernel, it is the degree of the
+    polynomial. For an RBF kernel, it is the sigma parameter. For a linear kernel,
+    it is not used. */
+const char * KERNEL_PARAM = "KernelParameter";
+
+/*  Kernel gain is a multiplicative factor applied to the dot product when computing
+    the kernel function. In most use cases, the gain should be set to 1.0. */
+const char * KERNEL_GAIN = "KernelGain";
+
+/*  Kernel bias is an additive factor applied to the dot product when computing
+    the kernel function. In most use cases, the bias should be set to 0.0. */
+const char * KERNEL_BIAS = "KernelBias";
+
+/*  This parameter determines the type of loss function to minimize.
+    Options : "PAIRWISE", "RECIPROCAL_RANK"
+    Default : "PAIRWISE" */
+const char * LOSS_TYPE = "LossType";
+
+/*  The minimum fraction of training pairs that is used in training.
+    Default : "0.1" */
+const char * ACC_PROB = "AcceptaceProbability";
+
+/*  The code averages out gradient updates for MinimumBatchSize samples
+    before performing an iteration of the algorithm. */
+const char * MIN_BATCH_SIZE = "MinimumBatchSize";
+
+/*  Specifies the number of non-zero entries allowed in a gradient.
+    The default is -1, which means the gradient is taken as given by the data without
+    adding any new constraints. A positive number is treated as an L0 constraint. */
+const char * GRAD_L0_NORM = "GradientL0Nrom";
+
+const char * REG_TYPE_L0 = "L0";
+const char * REG_TYPE_L1 = "L1";
+const char * REG_TYPE_L2 = "L2";
+const char * REG_TYPE_L1L2 = "L1L2";
+const char * REG_TYPE_L1LInf = "L1LInf";
+const char * UPDATE_TYPE_FULL_CS = "FULL_CS";
+const char * UPDATE_TYPE_CLIP_CS = "CLIP_CS";
+const char * UPDATE_TYPE_REG_CS = "REG_CS";
+const char * UPDATE_TYPE_SL = "SL";
+const char * UPDATE_TYPE_ADAPTIVE_REG = "ADAPTIVE_REG";
+const char * ADAPT_MODE_CONST = "CONST";
+const char * ADAPT_MODE_INV_LINEAR = "INV_LINEAR";
+const char * ADAPT_MODE_INV_QUADRATIC = "INV_QUADRATIC";
+const char * ADAPT_MODE_INV_SQRT = "INV_SQRT";
+const char * KERNEL_TYPE_LINEAR = "LINEAR";
+const char * KERNEL_TYPE_POLY = "POLY";
+const char * KERNEL_TYPE_RBF = "RBF";
+const char * LOSS_TYPE_PAIRWISE = "PAIRWISE";
+const char * LOSS_TYPE_RECIPROCAL_RANK = "RECIPROCAL_RANK";
+
 JNIEXPORT jint JNICALL
 Java_android_bordeaux_learning_StochasticLinearRanker_initNativeClassifier(
     JNIEnv* env,
@@ -54,12 +136,20 @@
     jint paPtr);
 
 JNIEXPORT void JNICALL
-Java_android_bordeaux_learning_StochasticLinearRanker_nativeGetClassifier(
+Java_android_bordeaux_learning_StochasticLinearRanker_nativeGetWeightClassifier(
     JNIEnv* env,
     jobject thiz,
-    jobjectArray key_array_model,
-    jfloatArray value_array_model,
-    jfloatArray value_array_param,
+    jobjectArray key_array_weight,
+    jfloatArray value_array_weight,
+    jfloat normalizer,
+    jint paPtr);
+
+JNIEXPORT void JNICALL
+Java_android_bordeaux_learning_StochasticLinearRanker_nativeGetParameterClassifier(
+    JNIEnv* env,
+    jobject thiz,
+    jobjectArray key_array_param,
+    jobjectArray value_array_param,
     jint paPtr);
 
 JNIEXPORT jint JNICALL
@@ -69,12 +159,20 @@
     jint paPtr);
 
 JNIEXPORT jboolean JNICALL
-Java_android_bordeaux_learning_StochasticLinearRanker_nativeLoadClassifier(
+Java_android_bordeaux_learning_StochasticLinearRanker_nativeSetWeightClassifier(
     JNIEnv* env,
     jobject thiz,
     jobjectArray key_array_model,
     jfloatArray value_array_model,
-    jfloatArray value_array_param,
+    jfloat normalizer_model,
+    jint paPtr);
+
+JNIEXPORT jboolean JNICALL
+Java_android_bordeaux_learning_StochasticLinearRanker_nativeSetParameterClassifier(
+    JNIEnv* env,
+    jobject thiz,
+    jstring key,
+    jstring value,
     jint paPtr);
 
 #ifdef __cplusplus
diff --git a/bordeaux/learning/stochastic_linear_ranker/native/stochastic_linear_ranker.h b/bordeaux/learning/stochastic_linear_ranker/native/stochastic_linear_ranker.h
index 21586db..cfcc49b 100644
--- a/bordeaux/learning/stochastic_linear_ranker/native/stochastic_linear_ranker.h
+++ b/bordeaux/learning/stochastic_linear_ranker/native/stochastic_linear_ranker.h
@@ -48,8 +48,10 @@
     learning_rate_controller_.SetLambda(lambda_);
     mini_batch_size_ = 1;
     learning_rate_controller_.SetMiniBatchSize(mini_batch_size_);
-    learning_rate_controller_.SetAdaptationMode(INV_LINEAR);
+    adaptation_mode_ = INV_LINEAR;
+    learning_rate_controller_.SetAdaptationMode(adaptation_mode_);
     update_type_ = SL;
+    regularization_type_ = L2;
     kernel_type_ = LINEAR;
     kernel_param_ = 1.0;
     kernel_gain_ = 1.0;
@@ -87,16 +89,21 @@
   AdaptationMode GetAdaptationMode() const {
     return adaptation_mode_;
   }
-  // This function additionally returns the basic kernel parameter. In case of
+  KernelType GetKernelType() const {
+    return kernel_type_;
+  }
+  // This function returns the basic kernel parameter. In case of
   // polynomial kernel, it implies the degree of the polynomial.  In case of
   // RBF kernel, it implies the sigma parameter. In case of linear kernel,
   // it is not used.
-  // It also returns the kernel gain and bias.
-  KernelType GetKernelType(double *param, double *gain, double *bias) const {
-    *param = kernel_param_;
-    *gain = kernel_gain_;
-    *bias = kernel_bias_;
-    return kernel_type_;
+  double GetKernelParam() const {
+    return kernel_param_;
+  }
+  double GetKernelGain() const {
+    return kernel_gain_;
+  }
+  double GetKernelBias() const {
+    return kernel_bias_;
   }
   RankLossType GetRankLossType() const {
     return rank_loss_type_;
@@ -125,16 +132,24 @@
     adaptation_mode_ = m;
     learning_rate_controller_.SetAdaptationMode(m);
   }
-  // This function additionally sets the basic kernel parameter. In case of
+  void SetKernelType(KernelType k) {
+    kernel_type_ = k;
+  }
+  // This function sets the basic kernel parameter. In case of
   // polynomial kernel, it implies the degree of the polynomial. In case of
   // RBF kernel, it implies the sigma parameter. In case of linear kernel,
   // it is not used.
-  // It also sets the kernel gain and bias. NOTE: in most use cases, gain should
-  // be set to 1.0 and bias to 0.0.
-  void SetKernelType(KernelType k, double param, double gain, double bias) {
-    kernel_type_ = k;
+  void SetKernelParam(double param) {
     kernel_param_ = param;
+  }
+  // This function sets the kernel gain. NOTE: in most use cases, gain should
+  // be set to 1.0.
+  void SetKernelGain(double gain) {
     kernel_gain_ = gain;
+  }
+  // This function sets the kernel bias. NOTE: in most use cases, bias should
+  // be set to 0.0.
+  void SetKernelBias(double bias) {
     kernel_bias_ = bias;
   }
   void SetUpdateType(UpdateType u) {
diff --git a/bordeaux/service/Android.mk b/bordeaux/service/Android.mk
index 0283083..c49a6d6 100644
--- a/bordeaux/service/Android.mk
+++ b/bordeaux/service/Android.mk
@@ -34,6 +34,7 @@
         src/android/bordeaux/services/BordeauxManagerService.java \
         src/android/bordeaux/services/IBordeauxLearner.java \
         src/android/bordeaux/services/Learning_StochasticLinearRanker.java \
+        src/android/bordeaux/services/StochasticLinearRankerWithPrior.java \
         src/android/bordeaux/services/IBordeauxServiceCallback.aidl \
         src/android/bordeaux/services/ILearning_MulticlassPA.aidl \
         src/android/bordeaux/services/ILearning_StochasticLinearRanker.aidl \
diff --git a/bordeaux/service/src/android/bordeaux/services/BordeauxRanker.java b/bordeaux/service/src/android/bordeaux/services/BordeauxRanker.java
index a0771dc..1977ce1 100644
--- a/bordeaux/service/src/android/bordeaux/services/BordeauxRanker.java
+++ b/bordeaux/service/src/android/bordeaux/services/BordeauxRanker.java
@@ -53,7 +53,7 @@
         return stringfloat_sample;
     }
 
-    private boolean retrieveRanker() {
+    public boolean retrieveRanker() {
         if (mRanker == null)
             mRanker = BordeauxManagerService.getRanker(mContext, mName);
         // if classifier is not available, return false
@@ -91,6 +91,19 @@
         return true;
     }
 
+    public boolean reset() {
+        if (!retrieveRanker()){
+            Log.e(TAG,"Exception: Ranker is not availible");
+            return false;
+        }
+        try {
+            mRanker.ResetRanker();
+            return true;
+        } catch (RemoteException e) {
+        }
+        return false;
+    }
+
     public float scoreSample(final HashMap<String, Float> sample) {
         if (!retrieveRanker())
             throw new RuntimeException(RANKER_NOTAVAILABLE);
@@ -102,13 +115,25 @@
         }
     }
 
-    public void loadModel(String filename) {
-        // no longer availabe through the interface
-        return;
+    public boolean setPriorWeight(final HashMap<String, Float> sample) {
+        if (!retrieveRanker())
+            throw new RuntimeException(RANKER_NOTAVAILABLE);
+        try {
+            return mRanker.SetModelPriorWeight(getArrayList(sample));
+        } catch (RemoteException e) {
+            Log.e(TAG,"Exception: set prior Weights");
+            throw new RuntimeException(RANKER_NOTAVAILABLE);
+        }
     }
 
-    public String saveModel(String filename) {
-        // no longer availabe through the interface
-        return null;
+    public boolean setParameter(String key, String value) {
+        if (!retrieveRanker())
+            throw new RuntimeException(RANKER_NOTAVAILABLE);
+        try {
+            return mRanker.SetModelParameter(key, value);
+        } catch (RemoteException e) {
+            Log.e(TAG,"Exception: scoring the sample with prior.");
+            throw new RuntimeException(RANKER_NOTAVAILABLE);
+        }
     }
 }
diff --git a/bordeaux/service/src/android/bordeaux/services/ILearning_StochasticLinearRanker.aidl b/bordeaux/service/src/android/bordeaux/services/ILearning_StochasticLinearRanker.aidl
index b0bb5c1..f89ce0a 100644
--- a/bordeaux/service/src/android/bordeaux/services/ILearning_StochasticLinearRanker.aidl
+++ b/bordeaux/service/src/android/bordeaux/services/ILearning_StochasticLinearRanker.aidl
@@ -28,5 +28,7 @@
 
     boolean UpdateClassifier(in List<StringFloat> sample_1, in List<StringFloat> sample_2);
     float ScoreSample(in List<StringFloat> sample);
-
+    void ResetRanker();
+    boolean SetModelPriorWeight(in List<StringFloat> weight);
+    boolean SetModelParameter(in String key, in String value);
 }
diff --git a/bordeaux/service/src/android/bordeaux/services/Learning_StochasticLinearRanker.java b/bordeaux/service/src/android/bordeaux/services/Learning_StochasticLinearRanker.java
index bb62658..ab51f94 100644
--- a/bordeaux/service/src/android/bordeaux/services/Learning_StochasticLinearRanker.java
+++ b/bordeaux/service/src/android/bordeaux/services/Learning_StochasticLinearRanker.java
@@ -17,28 +17,37 @@
 package android.bordeaux.services;
 
 import android.bordeaux.learning.StochasticLinearRanker;
-import android.bordeaux.learning.StochasticLinearRanker.Model;
 import android.bordeaux.services.IBordeauxLearner.ModelChangeCallback;
 import android.os.IBinder;
 import android.util.Log;
-
+import java.util.List;
+import java.util.ArrayList;
 import java.io.*;
 import java.lang.ClassNotFoundException;
 import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Scanner;
+import java.io.ByteArrayOutputStream;
+import java.util.HashMap;
+import java.util.Map;
 
 public class Learning_StochasticLinearRanker extends ILearning_StochasticLinearRanker.Stub
         implements IBordeauxLearner {
 
-    String TAG = "ILearning_StochasticLinearRanker";
-    private StochasticLinearRanker mLearningSlRanker = null;
+    private final String TAG = "ILearning_StochasticLinearRanker";
+    private StochasticLinearRankerWithPrior mLearningSlRanker = null;
     private ModelChangeCallback modelChangeCallback = null;
 
     public Learning_StochasticLinearRanker(){
     }
 
+    public void ResetRanker(){
+        if (mLearningSlRanker == null)
+            mLearningSlRanker = new StochasticLinearRankerWithPrior();
+        mLearningSlRanker.resetRanker();
+    }
+
     public boolean UpdateClassifier(List<StringFloat> sample_1, List<StringFloat> sample_2){
         ArrayList<StringFloat> temp_1 = (ArrayList<StringFloat>)sample_1;
         String[] keys_1 = new String[temp_1.size()];
@@ -54,7 +63,8 @@
             keys_2[i] = temp_2.get(i).key;
             values_2[i] = temp_2.get(i).value;
         }
-        if (mLearningSlRanker == null) mLearningSlRanker = new StochasticLinearRanker();
+        if (mLearningSlRanker == null)
+            mLearningSlRanker = new StochasticLinearRankerWithPrior();
         boolean res = mLearningSlRanker.updateClassifier(keys_1,values_1,keys_2,values_2);
         if (res && modelChangeCallback != null) {
             modelChangeCallback.modelChanged(this);
@@ -70,16 +80,32 @@
             keys[i] = temp.get(i).key;
             values[i] = temp.get(i).value;
         }
-        if (mLearningSlRanker == null) mLearningSlRanker = new StochasticLinearRanker();
-        float res=mLearningSlRanker .scoreSample(keys,values);
-        res = (float) (Math.exp(res)/(Math.exp(res)+Math.exp(-res)));
-        return res;
+        if (mLearningSlRanker == null)
+            mLearningSlRanker = new StochasticLinearRankerWithPrior();
+        return mLearningSlRanker.scoreSample(keys,values);
+    }
+
+    public boolean SetModelPriorWeight(List<StringFloat> sample) {
+        ArrayList<StringFloat> temp = (ArrayList<StringFloat>)sample;
+        HashMap<String, Float> weights = new HashMap<String, Float>();
+        for (int i = 0; i < temp.size(); i++)
+            weights.put(temp.get(i).key, temp.get(i).value);
+        if (mLearningSlRanker == null)
+            mLearningSlRanker = new StochasticLinearRankerWithPrior();
+        return mLearningSlRanker.setModelPriorWeights(weights);
+    }
+
+    public boolean SetModelParameter(String key, String value) {
+        if (mLearningSlRanker == null)
+            mLearningSlRanker = new StochasticLinearRankerWithPrior();
+        return mLearningSlRanker.setModelParameter(key,value);
     }
 
     // Beginning of the IBordeauxLearner Interface implementation
     public byte [] getModel() {
-        if (mLearningSlRanker == null) mLearningSlRanker = new StochasticLinearRanker();
-        Model model = mLearningSlRanker.getModel();
+        if (mLearningSlRanker == null)
+            mLearningSlRanker = new StochasticLinearRankerWithPrior();
+        StochasticLinearRankerWithPrior.Model model = mLearningSlRanker.getModel();
         try {
             ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
             ObjectOutputStream objStream = new ObjectOutputStream(byteStream);
@@ -97,8 +123,10 @@
         try {
             ByteArrayInputStream input = new ByteArrayInputStream(modelData);
             ObjectInputStream objStream = new ObjectInputStream(input);
-            Model model = (Model) objStream.readObject();
-            if (mLearningSlRanker == null) mLearningSlRanker = new StochasticLinearRanker();
+            StochasticLinearRankerWithPrior.Model model =
+                    (StochasticLinearRankerWithPrior.Model) objStream.readObject();
+            if (mLearningSlRanker == null)
+                mLearningSlRanker = new StochasticLinearRankerWithPrior();
             boolean res = mLearningSlRanker.loadModel(model);
             Log.i(TAG, "LoadModel: " + modelData);
             return res;
diff --git a/bordeaux/service/src/android/bordeaux/services/StochasticLinearRankerWithPrior.java b/bordeaux/service/src/android/bordeaux/services/StochasticLinearRankerWithPrior.java
new file mode 100644
index 0000000..fd56a2f
--- /dev/null
+++ b/bordeaux/service/src/android/bordeaux/services/StochasticLinearRankerWithPrior.java
@@ -0,0 +1,211 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.bordeaux.services;
+import android.util.Log;
+
+import android.bordeaux.learning.StochasticLinearRanker;
+import java.util.HashMap;
+import java.util.Map;
+import java.io.Serializable;
+
+public class StochasticLinearRankerWithPrior extends StochasticLinearRanker {
+    private final String TAG = "StochasticLinearRankerWithPrior";
+    private final float EPSILON = 0.0001f;
+
+    /* If this parameter is true, the final score is a
+    linear combination of the user model and prior model scores. */
+    private final String USE_PRIOR = "usePriorInformation";
+
+    /* When the prior model is used, this parameter sets the mixing factor, alpha. */
+    private final String SET_ALPHA = "setAlpha";
+
+    /* When the prior model is used, if this parameter is true the algorithm uses
+    an automatically cross-validated alpha for mixing the user model and the prior model. */
+    private final String USE_AUTO_ALPHA = "useAutoAlpha";
+
+    /* When automatic cross-validation is active, this parameter
+    sets the forgetting rate used in cross-validation. */
+    private final String SET_FORGET_RATE = "setForgetRate";
+
+    /* When automatic cross-validation is active, this parameter sets the minimum
+    number of training pairs required before the user model is used. */
+    private final String SET_MIN_TRAIN_PAIR = "setMinTrainingPair";
+
+    private final String SET_USER_PERF = "setUserPerformance";
+    private final String SET_PRIOR_PERF = "setPriorPerformance";
+    private final String SET_NUM_TRAIN_PAIR = "setNumberTrainingPairs";
+    private final String SET_AUTO_ALPHA = "setAutoAlpha";
+
+
+
+    private HashMap<String, Float> mPriorWeights = new HashMap<String, Float>();
+    private float mAlpha = 0;
+    private float mAutoAlpha = 0;
+    private float mForgetRate = 0;
+    private float mUserRankerPerf = 0;
+    private float mPriorRankerPerf = 0;
+    private int mMinReqTrainingPair = 0;
+    private int mNumTrainPair = 0;
+    private boolean mUsePrior = false;
+    private boolean mUseAutoAlpha = false;
+
+    static public class Model implements Serializable {
+        public StochasticLinearRanker.Model uModel = new StochasticLinearRanker.Model();
+        public HashMap<String, Float> priorWeights = new HashMap<String, Float>();
+        public HashMap<String, String> priorParameters = new HashMap<String, String>();
+    }
+
+    @Override
+    public void resetRanker(){
+        super.resetRanker();
+        mPriorWeights.clear();
+        mAlpha = 0;
+        mAutoAlpha = 0;
+        mForgetRate = 0;
+        mMinReqTrainingPair = 0;
+        mUserRankerPerf = 0;
+        mPriorRankerPerf = 0;
+        mNumTrainPair = 0;
+        mUsePrior = false;
+        mUseAutoAlpha = false;
+    }
+
+    @Override
+    public float scoreSample(String[] keys, float[] values) {
+        if (!mUsePrior){
+            return super.scoreSample(keys, values);
+        } else {
+            if (mUseAutoAlpha) {
+                if (mNumTrainPair > mMinReqTrainingPair)
+                    return (1 - mAutoAlpha) * super.scoreSample(keys,values) +
+                            mAutoAlpha * priorScoreSample(keys,values);
+                else
+                    return priorScoreSample(keys,values);
+            } else
+                return (1 - mAlpha) * super.scoreSample(keys,values) +
+                        mAlpha * priorScoreSample(keys,values);
+        }
+    }
+
+    public float priorScoreSample(String[] keys, float[] values) {
+        float score = 0;
+        for (int i=0; i< keys.length; i++){
+            if (mPriorWeights.get(keys[i]) != null )
+                score = score + mPriorWeights.get(keys[i]) * values[i];
+        }
+        return score;
+    }
+
+    @Override
+    public boolean updateClassifier(String[] keys_positive,
+                                    float[] values_positive,
+                                    String[] keys_negative,
+                                    float[] values_negative){
+        if (mUsePrior && mUseAutoAlpha && (mNumTrainPair > mMinReqTrainingPair))
+            updateAutoAlpha(keys_positive, values_positive, keys_negative, values_negative);
+        mNumTrainPair++;
+        return super.updateClassifier(keys_positive, values_positive,
+                                      keys_negative, values_negative);
+    }
+
+    void updateAutoAlpha(String[] keys_positive,
+                     float[] values_positive,
+                     String[] keys_negative,
+                     float[] values_negative) {
+        float positiveUserScore = super.scoreSample(keys_positive, values_positive);
+        float negativeUserScore = super.scoreSample(keys_negative, values_negative);
+        float positivePriorScore = priorScoreSample(keys_positive, values_positive);
+        float negativePriorScore = priorScoreSample(keys_negative, values_negative);
+        float userDecision = 0;
+        float priorDecision = 0;
+        if (positiveUserScore > negativeUserScore)
+            userDecision = 1;
+        if (positivePriorScore > negativePriorScore)
+            priorDecision = 1;
+        mUserRankerPerf = (1 - mForgetRate) * mUserRankerPerf + userDecision;
+        mPriorRankerPerf = (1 - mForgetRate) * mPriorRankerPerf + priorDecision;
+        mAutoAlpha = (mPriorRankerPerf + EPSILON) / (mUserRankerPerf + mPriorRankerPerf + EPSILON);
+    }
+
+    public Model getModel(){
+        Model m = new Model();
+        m.uModel = super.getUModel();
+        m.priorWeights.putAll(mPriorWeights);
+        m.priorParameters.put(SET_ALPHA, String.valueOf(mAlpha));
+        m.priorParameters.put(SET_AUTO_ALPHA, String.valueOf(mAutoAlpha));
+        m.priorParameters.put(SET_FORGET_RATE, String.valueOf(mForgetRate));
+        m.priorParameters.put(SET_MIN_TRAIN_PAIR, String.valueOf(mMinReqTrainingPair));
+        m.priorParameters.put(SET_USER_PERF, String.valueOf(mUserRankerPerf));
+        m.priorParameters.put(SET_PRIOR_PERF, String.valueOf(mPriorRankerPerf));
+        m.priorParameters.put(SET_NUM_TRAIN_PAIR, String.valueOf(mNumTrainPair));
+        m.priorParameters.put(USE_AUTO_ALPHA, String.valueOf(mUseAutoAlpha));
+        m.priorParameters.put(USE_PRIOR, String.valueOf(mUsePrior));
+        return m;
+    }
+
+    public boolean loadModel(Model m) {
+        mPriorWeights.clear();
+        mPriorWeights.putAll(m.priorWeights);
+        for (Map.Entry<String, String> e : m.priorParameters.entrySet()) {
+            boolean res = setModelParameter(e.getKey(), e.getValue());
+            if (!res) return false;
+        }
+        return super.loadModel(m.uModel);
+    }
+
+    public boolean setModelPriorWeights(HashMap<String, Float> pw){
+        mPriorWeights.clear();
+        mPriorWeights.putAll(pw);
+        return true;
+    }
+
+    public boolean setModelParameter(String key, String value){
+        if (key.equals(USE_AUTO_ALPHA)){
+            mUseAutoAlpha = Boolean.parseBoolean(value);
+        } else if (key.equals(USE_PRIOR)){
+            mUsePrior = Boolean.parseBoolean(value);
+        } else if (key.equals(SET_ALPHA)){
+            mAlpha = Float.valueOf(value.trim()).floatValue();
+        } else if (key.equals(SET_AUTO_ALPHA)){
+            mAutoAlpha = Float.valueOf(value.trim()).floatValue();
+        } else if (key.equals(SET_FORGET_RATE)){
+            mForgetRate = Float.valueOf(value.trim()).floatValue();
+        } else if (key.equals(SET_MIN_TRAIN_PAIR)){
+            mMinReqTrainingPair = (int) Float.valueOf(value.trim()).floatValue();
+        } else if (key.equals(SET_USER_PERF)){
+            mUserRankerPerf = Float.valueOf(value.trim()).floatValue();
+        } else if (key.equals(SET_PRIOR_PERF)){
+            mPriorRankerPerf = Float.valueOf(value.trim()).floatValue();
+        } else if (key.equals(SET_NUM_TRAIN_PAIR)){
+            mNumTrainPair = (int) Float.valueOf(value.trim()).floatValue();
+        } else
+            return super.setModelParameter(key, value);
+        return true;
+    }
+
+    public void print(Model m){
+        super.print(m.uModel);
+        String Spw = "";
+        for (Map.Entry<String, Float> e : m.priorWeights.entrySet())
+            Spw = Spw + "<" + e.getKey() + "," + e.getValue() + "> ";
+        Log.i(TAG, "Prior model is " + Spw);
+        String Spp = "";
+        for (Map.Entry<String, String> e : m.priorParameters.entrySet())
+            Spp = Spp + "<" + e.getKey() + "," + e.getValue() + "> ";
+        Log.i(TAG, "Prior parameters are " + Spp);
+    }
+}