[automerger skipped] Merge "Enable extservices tests to generate android-mts-extservices as well as android-mts." am: aac9b2a3dd am: 72f992d54d am: 4ea6880f51 -s ours am: 2309bbd1d2 -s ours am: b675b7dd79 -s ours

am skip reason: Merged-In I48e48bdee8957ebbd4c5c3db95854fc0a0cdb13d with SHA-1 758193ee61 is already in history

Original change: https://android-review.googlesource.com/c/platform/external/libtextclassifier/+/1531241

Change-Id: I476b669d6dd1c5f57e48adde04d5a013280749f5
diff --git a/TEST_MAPPING b/TEST_MAPPING
index ac3e735..93ea6d6 100644
--- a/TEST_MAPPING
+++ b/TEST_MAPPING
@@ -17,5 +17,19 @@
     {
       "name": "TextClassifierNotificationTests"
     }
+  ],
+  "mainline-presubmit": [
+    {
+      "name": "TextClassifierNotificationTests[com.google.android.extservices.apex]"
+    },
+    {
+      "name": "TextClassifierServiceTest[com.google.android.extservices.apex]"
+    },
+    {
+      "name": "libtextclassifier_tests[com.google.android.extservices.apex]"
+    },
+    {
+      "name": "libtextclassifier_java_tests[com.google.android.extservices.apex]"
+    }
   ]
 }
\ No newline at end of file
diff --git a/java/Android.bp b/java/Android.bp
index 30fd2bc..ca34a66 100644
--- a/java/Android.bp
+++ b/java/Android.bp
@@ -52,10 +52,7 @@
 // Similar to TextClassifierServiceLib, but without the AndroidManifest.
 android_library {
     name: "TextClassifierServiceLibNoManifest",
-    srcs: [
-        "src/**/*.java",
-        "src/**/*.aidl",
-    ],
+    srcs: ["src/**/*.java"],
     manifest: "LibNoManifest_AndroidManifest.xml",
     static_libs: [
         "androidx.core_core",
@@ -64,11 +61,6 @@
         "guava",
         "textclassifier-statsd",
         "error_prone_annotations",
-        "androidx.work_work-runtime",
-        "android_downloader_lib",
-        "textclassifier-statsd",
-        "textclassifier-java-proto-lite",
-        "androidx.concurrent_concurrent-futures",
     ],
     sdk_version: "system_current",
     min_sdk_version: "30",
diff --git a/java/AndroidManifest.xml b/java/AndroidManifest.xml
index 083991c..8ef323c 100644
--- a/java/AndroidManifest.xml
+++ b/java/AndroidManifest.xml
@@ -31,35 +31,19 @@
 
     <uses-permission android:name="android.permission.QUERY_ALL_PACKAGES" />
     <uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
-    <uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
-    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
 
-    <!-- The INTERNET permission is restricted to the modelDownloaderServiceProcess -->
-    <uses-permission android:name="android.permission.INTERNET"/>
 
     <application>
-        <processes>
-            <process>
-                <deny-permission android:name="android.permission.INTERNET" />
-            </process>
-            <process android:process=":modelDownloaderServiceProcess">
-                <allow-permission android:name="android.permission.INTERNET" />
-            </process>
-        </processes>
 
         <service
             android:exported="true"
+            android:directBootAware="false"
             android:name=".DefaultTextClassifierService"
             android:permission="android.permission.BIND_TEXTCLASSIFIER_SERVICE">
             <intent-filter>
                 <action android:name="android.service.textclassifier.TextClassifierService"/>
             </intent-filter>
         </service>
-        <service
-            android:exported="false"
-            android:name=".ModelDownloaderService"
-            android:process=":modelDownloaderServiceProcess">
-        </service>
     </application>
 
 </manifest>
diff --git a/java/src/com/android/textclassifier/AbstractDownloadWorker.java b/java/src/com/android/textclassifier/AbstractDownloadWorker.java
deleted file mode 100644
index 43150fc..0000000
--- a/java/src/com/android/textclassifier/AbstractDownloadWorker.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import android.content.Context;
-import androidx.work.Data;
-import androidx.work.ListenableWorker;
-import androidx.work.WorkerParameters;
-import com.android.textclassifier.common.base.TcLog;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.FluentFuture;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import java.io.File;
-import java.net.URI;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-/**
- * Abstract worker to download specified manifest/model. Subclasses only need to implement the logic
- * to handle the downloaded file. Scheduled/executed by WorkManager.
- */
-abstract class AbstractDownloadWorker extends ListenableWorker {
-  private static final String TAG = "DownloadWorker";
-
-  @VisibleForTesting static final String DATA_URL_KEY = "DownloadWorker_url";
-
-  @VisibleForTesting
-  static final String DATA_DESTINATION_PATH_KEY = "DownloadWorker_destinationPath";
-
-  @VisibleForTesting
-  static final String DATA_REUSE_EXISTING_FILE_KEY = "DownloadWorker_reuseExistingFile";
-
-  @VisibleForTesting
-  static final String DATA_MAX_DOWNLOAD_ATTEMPTS_KEY = "DownloadWorker_maxAttempts";
-
-  private static final boolean DATA_REUSE_EXISTING_FILE_DEFAULT = false;
-  private static final int DATA_MAX_DOWNLOAD_ATTEMPTS_DEFAULT = 5;
-
-  private final String url;
-  private final String destinationPath;
-  private final boolean reuseExistingFile;
-  private final int maxDownloadAttempts;
-
-  // TODO(licha): Maybe create some static executors and share them across tcs
-  private final ExecutorService bgExecutorService;
-  private final ModelDownloader downloader;
-
-  AbstractDownloadWorker(Context context, WorkerParameters workerParams) {
-    this(context, workerParams, Executors.newSingleThreadExecutor());
-  }
-
-  private AbstractDownloadWorker(
-      Context context, WorkerParameters workerParams, ExecutorService bgExecutorService) {
-    this(
-        context,
-        workerParams,
-        bgExecutorService,
-        new ModelDownloaderImpl(context, bgExecutorService));
-  }
-
-  @VisibleForTesting
-  AbstractDownloadWorker(
-      Context context,
-      WorkerParameters workerParams,
-      ExecutorService bgExecutorService,
-      ModelDownloader downloader) {
-    super(context, workerParams);
-
-    this.url = Preconditions.checkNotNull(getInputData().getString(DATA_URL_KEY));
-    this.destinationPath =
-        Preconditions.checkNotNull(getInputData().getString(DATA_DESTINATION_PATH_KEY));
-    this.reuseExistingFile =
-        getInputData().getBoolean(DATA_REUSE_EXISTING_FILE_KEY, DATA_REUSE_EXISTING_FILE_DEFAULT);
-    this.maxDownloadAttempts =
-        getInputData().getInt(DATA_MAX_DOWNLOAD_ATTEMPTS_KEY, DATA_MAX_DOWNLOAD_ATTEMPTS_DEFAULT);
-
-    this.bgExecutorService = Preconditions.checkNotNull(bgExecutorService);
-    this.downloader = Preconditions.checkNotNull(downloader);
-  }
-
-  @Override
-  public final ListenableFuture<ListenableWorker.Result> startWork() {
-    TcLog.d(
-        TAG,
-        String.format(
-            "Start download: from %s to %s, attempt:%d",
-            url, destinationPath, getRunAttemptCount()));
-    if (getRunAttemptCount() >= maxDownloadAttempts) {
-      TcLog.d(TAG, "Max attempt reached. Abort download task.");
-      return Futures.immediateFuture(ListenableWorker.Result.failure());
-    }
-
-    File targetFile = new File(destinationPath);
-    ListenableFuture<Long> downloadFuture =
-        (reuseExistingFile && targetFile.exists())
-            ? Futures.immediateFuture(targetFile.length())
-            : downloader.download(URI.create(url), targetFile);
-
-    return FluentFuture.from(downloadFuture)
-        .transform(
-            unusedBytesWritten -> {
-              if (!targetFile.exists()) {
-                throw new IllegalStateException("Download succeeded but target file not found.");
-              }
-              handleDownloadedFile(targetFile);
-              return ListenableWorker.Result.success();
-            },
-            bgExecutorService)
-        .catching(
-            Throwable.class,
-            e -> {
-              TcLog.e(TAG, "Download attempt failed.", e);
-              // Always delete downlaoded file if the work fails.
-              targetFile.delete();
-              // Retry until reach max allowed attempts (attempt starts from 0)
-              // The backoff time between two tries will grow exponentially (i.e. 30s, 1min,
-              // 2min, 4min). This is configurable when building the request.
-              return ListenableWorker.Result.retry();
-            },
-            bgExecutorService);
-  }
-
-  /**
-   * Subclass Workers should override (and only override) this method to handle downloaded file
-   * (e.g. validation, rename). They should throw unchecked Exceptions if failure occurs.
-   */
-  abstract Void handleDownloadedFile(File downloadedFile);
-
-  /**
-   * This method will be called when we our work gets interrupted by the system. Result future
-   * should have already been cancelled in that case. Unless it's because the REPLACE policy of
-   * WorkManager unique queue, the interrupted work will be rescheduled later.
-   */
-  @Override
-  public final void onStopped() {
-    TcLog.d(
-        TAG,
-        String.format(
-            "Stop download: from %s to %s, attempt:%d",
-            url, destinationPath, getRunAttemptCount()));
-    bgExecutorService.shutdown();
-  }
-
-  /**
-   * Helper to create a base input Data builder.
-   *
-   * @param url the URL from where to download content
-   * @param destinationPath the path on the device to store the downlaoded file
-   * @param reuseExistingFile if True, we will skip the download if a file exists in destinationPath
-   * @param maxDownloadAttempts max times to try before we abort this download task
-   */
-  static final Data.Builder createInputDataBuilder(
-      String url, String destinationPath, boolean reuseExistingFile, int maxDownloadAttempts) {
-    return new Data.Builder()
-        .putString(DATA_URL_KEY, url)
-        .putString(DATA_DESTINATION_PATH_KEY, destinationPath)
-        .putBoolean(DATA_REUSE_EXISTING_FILE_KEY, reuseExistingFile)
-        .putInt(DATA_MAX_DOWNLOAD_ATTEMPTS_KEY, maxDownloadAttempts);
-  }
-}
diff --git a/java/src/com/android/textclassifier/ActionsSuggestionsHelper.java b/java/src/com/android/textclassifier/ActionsSuggestionsHelper.java
index 4838503..beb155b 100644
--- a/java/src/com/android/textclassifier/ActionsSuggestionsHelper.java
+++ b/java/src/com/android/textclassifier/ActionsSuggestionsHelper.java
@@ -27,7 +27,7 @@
 import android.view.textclassifier.ConversationAction;
 import android.view.textclassifier.ConversationActions;
 import android.view.textclassifier.ConversationActions.Message;
-import com.android.textclassifier.ModelFileManager.ModelFile;
+import com.android.textclassifier.common.ModelFileManager.ModelFile;
 import com.android.textclassifier.common.base.TcLog;
 import com.android.textclassifier.common.intent.LabeledIntent;
 import com.android.textclassifier.common.intent.TemplateIntentFactory;
diff --git a/java/src/com/android/textclassifier/DefaultTextClassifierService.java b/java/src/com/android/textclassifier/DefaultTextClassifierService.java
index ca48a90..1f1e958 100644
--- a/java/src/com/android/textclassifier/DefaultTextClassifierService.java
+++ b/java/src/com/android/textclassifier/DefaultTextClassifierService.java
@@ -16,21 +16,23 @@
 
 package com.android.textclassifier;
 
-import android.content.BroadcastReceiver;
 import android.content.Context;
-import android.content.Intent;
-import android.content.IntentFilter;
 import android.os.CancellationSignal;
 import android.service.textclassifier.TextClassifierService;
 import android.view.textclassifier.ConversationActions;
 import android.view.textclassifier.SelectionEvent;
 import android.view.textclassifier.TextClassification;
+import android.view.textclassifier.TextClassificationContext;
 import android.view.textclassifier.TextClassificationSessionId;
 import android.view.textclassifier.TextClassifierEvent;
 import android.view.textclassifier.TextLanguage;
 import android.view.textclassifier.TextLinks;
 import android.view.textclassifier.TextSelection;
-import androidx.work.WorkManager;
+import androidx.annotation.NonNull;
+import androidx.collection.LruCache;
+import com.android.textclassifier.common.ModelFileManager;
+import com.android.textclassifier.common.TextClassifierServiceExecutors;
+import com.android.textclassifier.common.TextClassifierSettings;
 import com.android.textclassifier.common.base.TcLog;
 import com.android.textclassifier.common.statsd.TextClassifierApiUsageLogger;
 import com.android.textclassifier.utils.IndentingPrintWriter;
@@ -41,12 +43,11 @@
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import java.io.FileDescriptor;
 import java.io.PrintWriter;
+import java.util.Map;
 import java.util.concurrent.Callable;
 import java.util.concurrent.Executor;
-import java.util.concurrent.Executors;
 import javax.annotation.Nullable;
 
 /** An implementation of a TextClassifierService. */
@@ -57,13 +58,10 @@
   // TODO: Figure out do we need more concurrency.
   private ListeningExecutorService normPriorityExecutor;
   private ListeningExecutorService lowPriorityExecutor;
-
-  @Nullable private ModelDownloadManager modelDownloadManager;
-
   private TextClassifierImpl textClassifier;
   private TextClassifierSettings settings;
   private ModelFileManager modelFileManager;
-  private BroadcastReceiver localeChangedReceiver;
+  private LruCache<TextClassificationSessionId, TextClassificationContext> sessionIdToContext;
 
   public DefaultTextClassifierService() {
     this.injector = new InjectorImpl(this);
@@ -85,31 +83,25 @@
     normPriorityExecutor = injector.createNormPriorityExecutor();
     lowPriorityExecutor = injector.createLowPriorityExecutor();
     textClassifier = injector.createTextClassifierImpl(settings, modelFileManager);
-    localeChangedReceiver = new LocaleChangedReceiver(modelFileManager);
-
-    if (settings.isModelDownloadManagerEnabled()) {
-      modelDownloadManager =
-          new ModelDownloadManager(
-              WorkManager.getInstance(this),
-              ManifestDownloadWorker.class,
-              modelFileManager,
-              settings,
-              lowPriorityExecutor);
-      modelDownloadManager.init();
-    }
-
+    sessionIdToContext = new LruCache<>(settings.getSessionIdToContextCacheSize());
     textClassifierApiUsageLogger =
         injector.createTextClassifierApiUsageLogger(settings, lowPriorityExecutor);
-
-    injector
-        .getContext()
-        .registerReceiver(localeChangedReceiver, new IntentFilter(Intent.ACTION_LOCALE_CHANGED));
   }
 
   @Override
   public void onDestroy() {
     super.onDestroy();
-    injector.getContext().unregisterReceiver(localeChangedReceiver);
+  }
+
+  @Override
+  public void onCreateTextClassificationSession(
+      @NonNull TextClassificationContext context, @NonNull TextClassificationSessionId sessionId) {
+    sessionIdToContext.put(sessionId, context);
+  }
+
+  @Override
+  public void onDestroyTextClassificationSession(@NonNull TextClassificationSessionId sessionId) {
+    sessionIdToContext.remove(sessionId);
   }
 
   @Override
@@ -119,7 +111,9 @@
       CancellationSignal cancellationSignal,
       Callback<TextSelection> callback) {
     handleRequestAsync(
-        () -> textClassifier.suggestSelection(request),
+        () ->
+            textClassifier.suggestSelection(
+                sessionId, sessionIdToTextClassificationContext(sessionId), request),
         callback,
         textClassifierApiUsageLogger.createSession(
             TextClassifierApiUsageLogger.API_TYPE_SUGGEST_SELECTION, sessionId),
@@ -133,7 +127,9 @@
       CancellationSignal cancellationSignal,
       Callback<TextClassification> callback) {
     handleRequestAsync(
-        () -> textClassifier.classifyText(request),
+        () ->
+            textClassifier.classifyText(
+                sessionId, sessionIdToTextClassificationContext(sessionId), request),
         callback,
         textClassifierApiUsageLogger.createSession(
             TextClassifierApiUsageLogger.API_TYPE_CLASSIFY_TEXT, sessionId),
@@ -147,7 +143,9 @@
       CancellationSignal cancellationSignal,
       Callback<TextLinks> callback) {
     handleRequestAsync(
-        () -> textClassifier.generateLinks(request),
+        () ->
+            textClassifier.generateLinks(
+                sessionId, sessionIdToTextClassificationContext(sessionId), request),
         callback,
         textClassifierApiUsageLogger.createSession(
             TextClassifierApiUsageLogger.API_TYPE_GENERATE_LINKS, sessionId),
@@ -161,7 +159,9 @@
       CancellationSignal cancellationSignal,
       Callback<ConversationActions> callback) {
     handleRequestAsync(
-        () -> textClassifier.suggestConversationActions(request),
+        () ->
+            textClassifier.suggestConversationActions(
+                sessionId, sessionIdToTextClassificationContext(sessionId), request),
         callback,
         textClassifierApiUsageLogger.createSession(
             TextClassifierApiUsageLogger.API_TYPE_SUGGEST_CONVERSATION_ACTIONS, sessionId),
@@ -175,7 +175,9 @@
       CancellationSignal cancellationSignal,
       Callback<TextLanguage> callback) {
     handleRequestAsync(
-        () -> textClassifier.detectLanguage(request),
+        () ->
+            textClassifier.detectLanguage(
+                sessionId, sessionIdToTextClassificationContext(sessionId), request),
         callback,
         textClassifierApiUsageLogger.createSession(
             TextClassifierApiUsageLogger.API_TYPE_DETECT_LANGUAGES, sessionId),
@@ -184,7 +186,7 @@
 
   @Override
   public void onSelectionEvent(TextClassificationSessionId sessionId, SelectionEvent event) {
-    handleEvent(() -> textClassifier.onSelectionEvent(event));
+    handleEvent(() -> textClassifier.onSelectionEvent(sessionId, event));
   }
 
   @Override
@@ -198,9 +200,24 @@
     IndentingPrintWriter indentingPrintWriter = new IndentingPrintWriter(writer);
     // TODO(licha): Also dump ModelDownloadManager for debugging
     textClassifier.dump(indentingPrintWriter);
+    dumpImpl(indentingPrintWriter);
     indentingPrintWriter.flush();
   }
 
+  private void dumpImpl(IndentingPrintWriter printWriter) {
+    printWriter.println("DefaultTextClassifierService:");
+    printWriter.increaseIndent();
+    printWriter.println("sessionIdToContext:");
+    printWriter.increaseIndent();
+    for (Map.Entry<TextClassificationSessionId, TextClassificationContext> entry :
+        sessionIdToContext.snapshot().entrySet()) {
+      printWriter.printPair(entry.getKey().getValue(), entry.getValue());
+    }
+    printWriter.decreaseIndent();
+    printWriter.decreaseIndent();
+    printWriter.println();
+  }
+
   private <T> void handleRequestAsync(
       Callable<T> callable,
       Callback<T> callback,
@@ -248,20 +265,13 @@
         MoreExecutors.directExecutor());
   }
 
-  /**
-   * Receiver listening to locale change event. Ask ModelFileManager to do clean-up upon receiving.
-   */
-  static class LocaleChangedReceiver extends BroadcastReceiver {
-    private final ModelFileManager modelFileManager;
-
-    LocaleChangedReceiver(ModelFileManager modelFileManager) {
-      this.modelFileManager = modelFileManager;
+  @Nullable
+  private TextClassificationContext sessionIdToTextClassificationContext(
+      @Nullable TextClassificationSessionId sessionId) {
+    if (sessionId == null) {
+      return null;
     }
-
-    @Override
-    public void onReceive(Context context, Intent intent) {
-      modelFileManager.deleteUnusedModelFiles();
-    }
+    return sessionIdToContext.get(sessionId);
   }
 
   // Do not call any of these methods, except the constructor, before Service.onCreate is called.
@@ -296,23 +306,12 @@
 
     @Override
     public ListeningExecutorService createNormPriorityExecutor() {
-      return MoreExecutors.listeningDecorator(
-          Executors.newFixedThreadPool(
-              /* nThreads= */ 2,
-              new ThreadFactoryBuilder()
-                  .setNameFormat("tcs-norm-prio-executor")
-                  .setPriority(Thread.NORM_PRIORITY)
-                  .build()));
+      return TextClassifierServiceExecutors.getNormhPriorityExecutor();
     }
 
     @Override
     public ListeningExecutorService createLowPriorityExecutor() {
-      return MoreExecutors.listeningDecorator(
-          Executors.newSingleThreadExecutor(
-              new ThreadFactoryBuilder()
-                  .setNameFormat("tcs-low-prio-executor")
-                  .setPriority(Thread.NORM_PRIORITY - 1)
-                  .build()));
+      return TextClassifierServiceExecutors.getLowPriorityExecutor();
     }
 
     @Override
diff --git a/java/src/com/android/textclassifier/IModelDownloaderService.aidl b/java/src/com/android/textclassifier/IModelDownloaderService.aidl
deleted file mode 100644
index d69f5ca..0000000
--- a/java/src/com/android/textclassifier/IModelDownloaderService.aidl
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import com.android.textclassifier.IModelDownloaderCallback;
-
-/**
- * ModelDownloaderService binder interface.
- */
-oneway interface IModelDownloaderService {
-
-  /**
-   * @param url the full url to download model from
-   * @param targetFilePath the absolute file path for the destination file
-   * @param callback callback to notify caller the downloader result
-   */
-  void download(
-      String url, String targetFilePath, IModelDownloaderCallback callback);
-}
\ No newline at end of file
diff --git a/java/src/com/android/textclassifier/ManifestDownloadWorker.java b/java/src/com/android/textclassifier/ManifestDownloadWorker.java
deleted file mode 100644
index f067ccf..0000000
--- a/java/src/com/android/textclassifier/ManifestDownloadWorker.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import android.content.Context;
-import androidx.work.Constraints;
-import androidx.work.Data;
-import androidx.work.ExistingWorkPolicy;
-import androidx.work.ListenableWorker;
-import androidx.work.NetworkType;
-import androidx.work.OneTimeWorkRequest;
-import androidx.work.WorkManager;
-import androidx.work.WorkerParameters;
-import com.android.textclassifier.ModelFileManager.ModelType;
-import com.android.textclassifier.common.base.TcLog;
-import com.android.textclassifier.protobuf.ExtensionRegistryLite;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.EnumBiMap;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.util.concurrent.Futures;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.time.Duration;
-
-/** Worker to download/parse models' manifest file and schedule the acutal model download task. */
-public final class ManifestDownloadWorker extends AbstractDownloadWorker {
-  private static final String TAG = "ManifestDownloadWorker";
-  private static final String DATA_MODEL_TYPE_KEY = "ManifestDownloadWorker_modelType";
-  private static final String DATA_MODEL_LANGUAGE_TAG_KEY =
-      "ManifestDownloadWorker_modelLanguageTag";
-  private static final String DATA_MANIFEST_URL_KEY = "ManifestDownloadWorker_manifestUrl";
-  private static final String DATA_TARGET_MODEL_PATH_KEY = "ManifestDownloadWorker_targetModelPath";
-
-  private static final EnumBiMap<ModelManifest.NetworkType, NetworkType> NETWORK_TYPE_MAP =
-      EnumBiMap.create(
-          ImmutableMap.of(
-              ModelManifest.NetworkType.UNMETERED, NetworkType.UNMETERED,
-              ModelManifest.NetworkType.METERED, NetworkType.METERED,
-              ModelManifest.NetworkType.NOT_REQUIRED, NetworkType.NOT_REQUIRED,
-              ModelManifest.NetworkType.NOT_ROAMING, NetworkType.NOT_ROAMING,
-              ModelManifest.NetworkType.CONNECTED, NetworkType.CONNECTED));
-
-  private final String modelType;
-  private final String modelLanguageTag;
-  private final String manifestUrl;
-  private final String targetModelPath;
-
-  private final Context context;
-  private final Class<? extends ListenableWorker> modelDownloadWorkerClass;
-  private final WorkManager workManager;
-
-  public ManifestDownloadWorker(Context context, WorkerParameters workerParams) {
-    this(context, workerParams, ModelDownloadWorker.class);
-  }
-
-  @VisibleForTesting
-  ManifestDownloadWorker(
-      Context context,
-      WorkerParameters workerParams,
-      Class<? extends ListenableWorker> modelDownloadWorkerClass) {
-    super(context, workerParams);
-
-    this.modelType = Preconditions.checkNotNull(getInputData().getString(DATA_MODEL_TYPE_KEY));
-    this.modelLanguageTag =
-        Preconditions.checkNotNull(getInputData().getString(DATA_MODEL_LANGUAGE_TAG_KEY));
-    this.manifestUrl = Preconditions.checkNotNull(getInputData().getString(DATA_MANIFEST_URL_KEY));
-    this.targetModelPath =
-        Preconditions.checkNotNull(getInputData().getString(DATA_TARGET_MODEL_PATH_KEY));
-
-    this.context = Preconditions.checkNotNull(context);
-    this.modelDownloadWorkerClass = Preconditions.checkNotNull(modelDownloadWorkerClass);
-    this.workManager = Preconditions.checkNotNull(WorkManager.getInstance(context));
-  }
-
-  @Override
-  public Void handleDownloadedFile(File manifestFile) {
-    TcLog.d(TAG, "Start to parse model manifest: " + manifestFile.getAbsolutePath());
-    ModelManifest modelManifest;
-    try {
-      modelManifest =
-          ModelManifest.parseFrom(
-              new FileInputStream(manifestFile), ExtensionRegistryLite.getEmptyRegistry());
-    } catch (IOException e) {
-      throw new IllegalStateException("Failed to parse the manifest file.", e);
-    }
-
-    Preconditions.checkState(modelManifest.getModelsCount() == 1);
-    ModelManifest.Model model = modelManifest.getModels(0);
-    Preconditions.checkState(
-        model.getUrl().startsWith(ModelDownloadManager.TEXT_CLASSIFIER_URL_PREFIX));
-    Preconditions.checkState(model.getSizeInBytes() > 0 && !model.getFingerprint().isEmpty());
-
-    File targetModelFile = new File(targetModelPath);
-    File pendingModelFile = new File(context.getCacheDir(), targetModelFile.getName() + ".pending");
-    OneTimeWorkRequest modelDownloadRequest =
-        new OneTimeWorkRequest.Builder(modelDownloadWorkerClass)
-            .setInputData(
-                ModelDownloadWorker.createInputData(
-                    model.getUrl(),
-                    model.getSizeInBytes(),
-                    model.getFingerprint(),
-                    manifestFile.getAbsolutePath(),
-                    pendingModelFile.getAbsolutePath(),
-                    targetModelPath,
-                    /* maxDownloadAttempts= */ 5,
-                    /* reuseExistingModelFile= */ false))
-            .addTag(manifestUrl)
-            .setConstraints(
-                new Constraints.Builder()
-                    .setRequiredNetworkType(
-                        NETWORK_TYPE_MAP.get(modelManifest.getRequiredNetworkType()))
-                    .setRequiresBatteryNotLow(modelManifest.getRequiresBatteryNotLow())
-                    .setRequiresCharging(modelManifest.getRequiresCharging())
-                    .setRequiresDeviceIdle(modelManifest.getRequiresDeviceIdle())
-                    .setRequiresStorageNotLow(modelManifest.getRequiresStorageNotLow())
-                    .build())
-            .keepResultsForAtLeast(
-                Duration.ofDays(ModelDownloadManager.DAYS_TO_KEEP_THE_DOWNLOAD_RESULT))
-            .build();
-
-    // Enqueue chained requests to a unique queue (different from the manifest queue)
-    Futures.getUnchecked(
-        workManager
-            .enqueueUniqueWork(
-                ModelDownloadManager.getModelUniqueWorkName(modelType, modelLanguageTag),
-                ExistingWorkPolicy.REPLACE,
-                modelDownloadRequest)
-            .getResult());
-    return null;
-  }
-
-  /** Creates input Data for a ManifestDownloadWorker. */
-  public static Data createInputData(
-      @ModelType.ModelTypeDef String modelType,
-      String modelLanguageTag,
-      String manifestUrl,
-      String targetManifestPath,
-      String targetModelPath,
-      int maxDownloadAttempts,
-      boolean reuseExistingManifestFile) {
-    return AbstractDownloadWorker.createInputDataBuilder(
-            manifestUrl, targetManifestPath, reuseExistingManifestFile, maxDownloadAttempts)
-        .putString(DATA_MODEL_TYPE_KEY, modelType)
-        .putString(DATA_MODEL_LANGUAGE_TAG_KEY, modelLanguageTag)
-        .putString(DATA_MANIFEST_URL_KEY, manifestUrl)
-        .putString(DATA_TARGET_MODEL_PATH_KEY, targetModelPath)
-        .build();
-  }
-}
diff --git a/java/src/com/android/textclassifier/ModelDownloadManager.java b/java/src/com/android/textclassifier/ModelDownloadManager.java
deleted file mode 100644
index 1e7879a..0000000
--- a/java/src/com/android/textclassifier/ModelDownloadManager.java
+++ /dev/null
@@ -1,258 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import android.os.LocaleList;
-import android.provider.DeviceConfig;
-import android.text.TextUtils;
-import androidx.work.Constraints;
-import androidx.work.ExistingWorkPolicy;
-import androidx.work.ListenableWorker;
-import androidx.work.OneTimeWorkRequest;
-import androidx.work.WorkInfo;
-import androidx.work.WorkManager;
-import androidx.work.WorkQuery;
-import com.android.textclassifier.ModelFileManager.ModelType;
-import com.android.textclassifier.common.base.TcLog;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import java.io.File;
-import java.time.Duration;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Locale;
-import java.util.concurrent.ExecutionException;
-
-/** Manager to listen to config update and download latest models. */
-final class ModelDownloadManager {
-  private static final String TAG = "ModelDownloadManager";
-
-  static final String UNIVERSAL_MODEL_LANGUAGE_TAG = "universal";
-  static final String TEXT_CLASSIFIER_URL_PREFIX =
-      "https://www.gstatic.com/android/text_classifier/";
-  static final long DAYS_TO_KEEP_THE_DOWNLOAD_RESULT = 28L;
-
-  private final Object lock = new Object();
-
-  private final WorkManager workManager;
-  private final Class<? extends ListenableWorker> manifestDownloadWorkerClass;
-  private final ModelFileManager modelFileManager;
-  private final TextClassifierSettings settings;
-  private final ListeningExecutorService executorService;
-  private final DeviceConfig.OnPropertiesChangedListener deviceConfigListener;
-
-  /**
-   * Constructor for ModelDownloadManager.
-   *
-   * @param workManager singleton WorkManager instance
-   * @param manifestDownloadWorkerClass WorkManager's Worker class to download model manifest and
-   *     schedule the actual model download work
-   * @param modelFileManager ModelFileManager to interact with storage layer
-   * @param settings TextClassifierSettings to access DeviceConfig and other settings
-   * @param executorService background executor service
-   */
-  public ModelDownloadManager(
-      WorkManager workManager,
-      Class<? extends ListenableWorker> manifestDownloadWorkerClass,
-      ModelFileManager modelFileManager,
-      TextClassifierSettings settings,
-      ListeningExecutorService executorService) {
-    this.workManager = Preconditions.checkNotNull(workManager);
-    this.manifestDownloadWorkerClass = Preconditions.checkNotNull(manifestDownloadWorkerClass);
-    this.modelFileManager = Preconditions.checkNotNull(modelFileManager);
-    this.settings = Preconditions.checkNotNull(settings);
-    this.executorService = Preconditions.checkNotNull(executorService);
-
-    this.deviceConfigListener =
-        new DeviceConfig.OnPropertiesChangedListener() {
-          @Override
-          public void onPropertiesChanged(DeviceConfig.Properties unused) {
-            // Trigger the check even when the change is unrelated just in case we missed a previous
-            // update
-            checkConfigAndScheduleDownloads();
-          }
-        };
-  }
-
-  /**
-   * Registers a listener to related DeviceConfig flag changes. Will also download models with
-   * {@code executorService} if necessary.
-   */
-  public void init() {
-    DeviceConfig.addOnPropertiesChangedListener(
-        DeviceConfig.NAMESPACE_TEXTCLASSIFIER, executorService, deviceConfigListener);
-    TcLog.d(TAG, "DeviceConfig listener registered by ModelDownloadManager");
-    // Check flags in background, in case any updates heppened before the TCS starts
-    executorService.execute(this::checkConfigAndScheduleDownloads);
-  }
-
-  /** Un-register the listener to DeviceConfig. */
-  public void destroy() {
-    DeviceConfig.removeOnPropertiesChangedListener(deviceConfigListener);
-    TcLog.d(TAG, "DeviceConfig listener unregistered by ModelDownloadeManager");
-  }
-
-  /**
-   * Check DeviceConfig and schedule new model download requests synchronously. This method is
-   * synchronized and contains blocking operations, only call it in a background executor.
-   */
-  private void checkConfigAndScheduleDownloads() {
-    TcLog.v(TAG, "Checking DeviceConfig to see whether there are new models to download");
-    synchronized (lock) {
-      List<Locale.LanguageRange> languageRanges =
-          Locale.LanguageRange.parse(LocaleList.getAdjustedDefault().toLanguageTags());
-      for (String modelType : ModelType.values()) {
-        // Notice: Be careful of the Locale.lookupTag() matching logic: 1) it will convert the tag
-        // to lower-case only; 2) it matches tags from tail to head and does not allow missing
-        // pieces. E.g. if your system locale is zh-hans-cn, it won't match zh-cn.
-        String bestTag =
-            Locale.lookupTag(
-                languageRanges, settings.getLanguageTagsForManifestURLSuffix(modelType));
-        String modelLanguageTag = bestTag != null ? bestTag : UNIVERSAL_MODEL_LANGUAGE_TAG;
-
-        // One manifest url suffix can uniquely identify a model in the world
-        String manifestUrlSuffix = settings.getManifestURLSuffix(modelType, modelLanguageTag);
-        if (TextUtils.isEmpty(manifestUrlSuffix)) {
-          continue;
-        }
-        String manifestUrl = TEXT_CLASSIFIER_URL_PREFIX + manifestUrlSuffix;
-
-        // Check whether a manifest or a model is in the queue/in the middle of downloading. Both
-        // manifest/model works are tagged with the manifest URL.
-        WorkQuery workQuery =
-            WorkQuery.Builder.fromTags(ImmutableList.of(manifestUrl))
-                .addStates(
-                    Arrays.asList(
-                        WorkInfo.State.BLOCKED, WorkInfo.State.ENQUEUED, WorkInfo.State.RUNNING))
-                .build();
-        try {
-          List<WorkInfo> workInfos = workManager.getWorkInfos(workQuery).get();
-          if (!workInfos.isEmpty()) {
-            TcLog.v(TAG, "Target model is already in the download queue.");
-            continue;
-          }
-        } catch (ExecutionException | InterruptedException e) {
-          TcLog.e(TAG, "Failed to query queued requests. Ignore and continue.", e);
-        }
-
-        // Target file's name has the url suffix encoded in it
-        File targetModelFile = modelFileManager.getDownloadTargetFile(modelType, manifestUrlSuffix);
-        if (!targetModelFile.getParentFile().exists()) {
-          if (!targetModelFile.getParentFile().mkdirs()) {
-            TcLog.e(TAG, "Failed to create " + targetModelFile.getParentFile().getAbsolutePath());
-            continue;
-          }
-        }
-        // TODO(licha): Ideally, we should verify whether the existing file can be loaded
-        // successfully
-        // Notes: We also don't check factory models and models downloaded by ConfigUpdater. But
-        // this is probablly fine because it's unlikely to have an overlap.
-        if (targetModelFile.exists()) {
-          TcLog.v(TAG, "Target model is already in the storage.");
-          continue;
-        }
-
-        // Skip models downloaded successfully in (at least) past DAYS_TO_KEEP_THE_DOWNLOAD_RESULT
-        // Because we delete less-preferred models after one model downloaded, it's possible that
-        // we fall in a loop (download - delete - download again) if P/H flag is in a bad state.
-        // NOTICE: Because we use an unique work name here, if we download model-1 first and then
-        // model-2, then model-1's WorkInfo will be lost. In that case, if the flag goes back to
-        // model-1, we will download it again even if it's within DAYS_TO_KEEP_THE_DOWNLOAD_RESULT
-        WorkQuery downlaodedBeforeWorkQuery =
-            WorkQuery.Builder.fromTags(ImmutableList.of(manifestUrl))
-                .addStates(ImmutableList.of(WorkInfo.State.SUCCEEDED))
-                .addUniqueWorkNames(
-                    ImmutableList.of(getModelUniqueWorkName(modelType, modelLanguageTag)))
-                .build();
-        try {
-          List<WorkInfo> downloadedBeforeWorkInfos =
-              workManager.getWorkInfos(downlaodedBeforeWorkQuery).get();
-          if (!downloadedBeforeWorkInfos.isEmpty()) {
-            TcLog.v(TAG, "The model was downloaded successfully before and got cleaned-up later");
-            continue;
-          }
-        } catch (ExecutionException | InterruptedException e) {
-          TcLog.e(TAG, "Failed to query queued requests. Ignore and continue.", e);
-        }
-
-        String targetModelPath = targetModelFile.getAbsolutePath();
-        String targetManifestPath = getTargetManifestPath(targetModelPath);
-        OneTimeWorkRequest manifestDownloadRequest =
-            new OneTimeWorkRequest.Builder(manifestDownloadWorkerClass)
-                .setInputData(
-                    ManifestDownloadWorker.createInputData(
-                        modelType,
-                        modelLanguageTag,
-                        manifestUrl,
-                        targetManifestPath,
-                        targetModelPath,
-                        settings.getModelDownloadMaxAttempts(),
-                        /* reuseExistingManifestFile= */ true))
-                .addTag(manifestUrl)
-                .setConstraints(
-                    new Constraints.Builder()
-                        .setRequiredNetworkType(settings.getManifestDownloadRequiredNetworkType())
-                        .setRequiresBatteryNotLow(true)
-                        .setRequiresStorageNotLow(true)
-                        .build())
-                .keepResultsForAtLeast(Duration.ofDays(DAYS_TO_KEEP_THE_DOWNLOAD_RESULT))
-                .build();
-
-        // When we enqueue a new request, existing pending request in the same queue will be
-        // cancelled. With this, device will be able to abort previous unfinished downloads
-        // (e.g. 711) when a fresher model is already(e.g. v712).
-        try {
-          // Block until we enqueue the request successfully
-          workManager
-              .enqueueUniqueWork(
-                  getManifestUniqueWorkName(modelType, modelLanguageTag),
-                  ExistingWorkPolicy.REPLACE,
-                  manifestDownloadRequest)
-              .getResult()
-              .get();
-          TcLog.d(TAG, "Download scheduled: " + manifestUrl);
-        } catch (ExecutionException | InterruptedException e) {
-          TcLog.e(TAG, "Failed to enqueue a request", e);
-        }
-      }
-    }
-  }
-
-  @VisibleForTesting
-  void checkConfigAndScheduleDownloadsForTesting() {
-    checkConfigAndScheduleDownloads();
-  }
-
-  @VisibleForTesting
-  static String getTargetManifestPath(String targetModelPath) {
-    return targetModelPath + ".manifest";
-  }
-
-  @VisibleForTesting
-  static String getManifestUniqueWorkName(
-      @ModelType.ModelTypeDef String modelType, String modelLanguageTag) {
-    return String.format("manifest-%s-%s", modelType, modelLanguageTag);
-  }
-
-  // ManifestDownloadWorker needs to access this
-  static String getModelUniqueWorkName(
-      @ModelType.ModelTypeDef String modelType, String modelLanguageTag) {
-    return "model-" + modelType + "-" + modelLanguageTag;
-  }
-}
diff --git a/java/src/com/android/textclassifier/ModelDownloadWorker.java b/java/src/com/android/textclassifier/ModelDownloadWorker.java
deleted file mode 100644
index 641af8a..0000000
--- a/java/src/com/android/textclassifier/ModelDownloadWorker.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import android.content.Context;
-import androidx.work.Data;
-import androidx.work.WorkerParameters;
-import com.android.textclassifier.common.base.TcLog;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.hash.HashCode;
-import com.google.common.hash.Hashing;
-import com.google.common.io.Files;
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.StandardCopyOption;
-
-/** Worker to download, validate and update model image files. */
-public final class ModelDownloadWorker extends AbstractDownloadWorker {
-  private static final String TAG = "ModelDownloadWorker";
-
-  @VisibleForTesting
-  static final String DATA_MANIFEST_PATH_KEY = "ModelDownloadWorker_manifestPath";
-
-  @VisibleForTesting
-  static final String DATA_TARGET_MODEL_PATH_KEY = "ModelDownloadWorker_targetModelPath";
-
-  @VisibleForTesting
-  static final String DATA_MODEL_SIZE_IN_BYTES_KEY = "ModelDownloadWorker_modelSizeInBytes";
-
-  @VisibleForTesting
-  static final String DATA_MODEL_FINGERPRINT_KEY = "ModelDownloadWorker_modelFingerprint";
-
-  private final String manifestPath;
-  private final String targetModelPath;
-  private final long modelSizeInBytes;
-  private final String modelFingerprint;
-  private final ModelFileManager modelFileManager;
-
-  public ModelDownloadWorker(Context context, WorkerParameters workerParams) {
-    super(context, workerParams);
-    this.manifestPath =
-        Preconditions.checkNotNull(getInputData().getString(DATA_MANIFEST_PATH_KEY));
-    this.targetModelPath =
-        Preconditions.checkNotNull(getInputData().getString(DATA_TARGET_MODEL_PATH_KEY));
-    this.modelSizeInBytes =
-        getInputData().getLong(DATA_MODEL_SIZE_IN_BYTES_KEY, /* defaultValue= */ 0L);
-    this.modelFingerprint =
-        Preconditions.checkNotNull(getInputData().getString(DATA_MODEL_FINGERPRINT_KEY));
-    this.modelFileManager = new ModelFileManager(context, new TextClassifierSettings());
-  }
-
-  @Override
-  public Void handleDownloadedFile(File pendingModelFile) {
-    TcLog.d(TAG, "Start to check pending model file: " + pendingModelFile.getAbsolutePath());
-    try {
-      validateModel(pendingModelFile, modelSizeInBytes, modelFingerprint);
-
-      File targetModelFile = new File(targetModelPath);
-      java.nio.file.Files.move(
-          pendingModelFile.toPath(),
-          targetModelFile.toPath(),
-          StandardCopyOption.ATOMIC_MOVE,
-          StandardCopyOption.REPLACE_EXISTING);
-      TcLog.d(TAG, "Model file downloaded successfully: " + targetModelFile.getAbsolutePath());
-
-      // Clean up manifest and older models
-      new File(manifestPath).delete();
-      modelFileManager.deleteUnusedModelFiles();
-      return null;
-    } catch (Exception e) {
-      throw new IllegalStateException("Failed to validate or move pending model file.", e);
-    } finally {
-      pendingModelFile.delete();
-    }
-  }
-
-  /** Model verification. Throws unchecked Exceptions if validation fails. */
-  private static void validateModel(File pendingModelFile, long sizeInBytes, String fingerprint)
-      throws IOException {
-    if (!pendingModelFile.exists()) {
-      throw new IllegalStateException("PendingModelFile does not exist.");
-    }
-    if (pendingModelFile.length() != sizeInBytes) {
-      throw new IllegalStateException(
-          String.format(
-              "PendingModelFile size does not match: expected [%d] actual [%d]",
-              sizeInBytes, pendingModelFile.length()));
-    }
-    HashCode pendingModelFingerprint = Files.asByteSource(pendingModelFile).hash(Hashing.sha384());
-    if (!pendingModelFingerprint.equals(HashCode.fromString(fingerprint))) {
-      throw new IllegalStateException(
-          String.format(
-              "PendingModelFile fingerprint does not match: expected [%s] actual [%s]",
-              fingerprint, pendingModelFingerprint));
-    }
-    TcLog.d(TAG, "Pending model file passed validation.");
-  }
-
-  /** Creates input Data for a ModelDownloadWorker. */
-  public static Data createInputData(
-      String modelUrl,
-      long modelSizeInBytes,
-      String modelFingerprint,
-      String manifestPath,
-      String pendingModelPath,
-      String targetModelPath,
-      int maxDownloadAttempts,
-      boolean reuseExistingModelFile) {
-    return AbstractDownloadWorker.createInputDataBuilder(
-            modelUrl, pendingModelPath, reuseExistingModelFile, maxDownloadAttempts)
-        .putString(DATA_MANIFEST_PATH_KEY, manifestPath)
-        .putString(DATA_TARGET_MODEL_PATH_KEY, targetModelPath)
-        .putLong(DATA_MODEL_SIZE_IN_BYTES_KEY, modelSizeInBytes)
-        .putString(DATA_MODEL_FINGERPRINT_KEY, modelFingerprint)
-        .build();
-  }
-}
diff --git a/java/src/com/android/textclassifier/ModelDownloader.java b/java/src/com/android/textclassifier/ModelDownloader.java
deleted file mode 100644
index 7839a9b..0000000
--- a/java/src/com/android/textclassifier/ModelDownloader.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import com.google.common.util.concurrent.ListenableFuture;
-import java.io.File;
-import java.net.URI;
-
-/** Interface for downloading files from certain URI. */
-public interface ModelDownloader {
-
-  /**
-   * Downloads a file from the given URI to the target file.
-   *
-   * <p>For a successful download, this method returns a Future containing the number of bytes
-   * written. For a failure case, the Future would fail, with the exception containing more
-   * information. The implementations of this interface should clean up unfinished model files if
-   * the download fails.
-   *
-   * @param uri the URI to download file from
-   * @param targetFile the target File to write the downloaded content. If the file already exists,
-   *     its content will be cleared
-   */
-  ListenableFuture<Long> download(URI uri, File targetFile);
-}
diff --git a/java/src/com/android/textclassifier/ModelDownloaderImpl.java b/java/src/com/android/textclassifier/ModelDownloaderImpl.java
deleted file mode 100644
index 83eddde..0000000
--- a/java/src/com/android/textclassifier/ModelDownloaderImpl.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import static android.content.Context.BIND_AUTO_CREATE;
-import static android.content.Context.BIND_NOT_FOREGROUND;
-
-import android.content.ComponentName;
-import android.content.Context;
-import android.content.Intent;
-import android.content.ServiceConnection;
-import android.os.IBinder;
-import androidx.concurrent.futures.CallbackToFutureAdapter;
-import com.android.textclassifier.common.base.TcLog;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import java.io.File;
-import java.net.URI;
-import java.util.concurrent.ExecutorService;
-
-/**
- * ModelDownloader implementation that forwards requests to ModelDownloaderService. This is to
- * restrict the INTERNET permission to the service process only (instead of the whole ExtServices).
- */
-final class ModelDownloaderImpl implements ModelDownloader {
-  private static final String TAG = "ModelDownloaderImpl";
-
-  private final Context context;
-  private final ExecutorService bgExecutorService;
-  private final Class<?> downloaderServiceClass;
-
-  public ModelDownloaderImpl(Context context, ExecutorService bgExecutorService) {
-    this(context, bgExecutorService, ModelDownloaderService.class);
-  }
-
-  @VisibleForTesting
-  ModelDownloaderImpl(
-      Context context, ExecutorService bgExecutorService, Class<?> downloaderServiceClass) {
-    this.context = context.getApplicationContext();
-    this.bgExecutorService = bgExecutorService;
-    this.downloaderServiceClass = downloaderServiceClass;
-  }
-
-  @Override
-  public ListenableFuture<Long> download(URI uri, File targetFile) {
-    DownloaderServiceConnection conn = new DownloaderServiceConnection();
-    ListenableFuture<IModelDownloaderService> downloaderServiceFuture = connect(conn);
-    ListenableFuture<Long> bytesWrittenFuture =
-        Futures.transformAsync(
-            downloaderServiceFuture,
-            service -> scheduleDownload(service, uri, targetFile),
-            bgExecutorService);
-    bytesWrittenFuture.addListener(
-        () -> {
-          try {
-            context.unbindService(conn);
-          } catch (IllegalArgumentException e) {
-            TcLog.e(TAG, "Error when unbind", e);
-          }
-        },
-        bgExecutorService);
-    return bytesWrittenFuture;
-  }
-
-  private ListenableFuture<IModelDownloaderService> connect(DownloaderServiceConnection conn) {
-    TcLog.d(TAG, "Starting a new connection to ModelDownloaderService");
-    return CallbackToFutureAdapter.getFuture(
-        completer -> {
-          conn.attachCompleter(completer);
-          Intent intent = new Intent(context, downloaderServiceClass);
-          if (context.bindService(intent, conn, BIND_AUTO_CREATE | BIND_NOT_FOREGROUND)) {
-            return "Binding to service";
-          } else {
-            completer.setException(new RuntimeException("Unable to bind to service"));
-            return "Binding failed";
-          }
-        });
-  }
-
-  // Here the returned download result future can be set by: 1) the service can invoke the callback
-  // and set the result/exception; 2) If the service crashed, the CallbackToFutureAdapter will try
-  // to fail the future when the callback is garbage collected. If somehow none of them worked, the
-  // restult future will hang there until time out. (WorkManager forces a 10-min running time.)
-  private static ListenableFuture<Long> scheduleDownload(
-      IModelDownloaderService service, URI uri, File targetFile) {
-    TcLog.d(TAG, "Scheduling a new download task with ModelDownloaderService");
-    return CallbackToFutureAdapter.getFuture(
-        completer -> {
-          service.download(
-              uri.toString(),
-              targetFile.getAbsolutePath(),
-              new IModelDownloaderCallback.Stub() {
-                @Override
-                public void onSuccess(long bytesWritten) {
-                  completer.set(bytesWritten);
-                }
-
-                @Override
-                public void onFailure(String errorMsg) {
-                  completer.setException(new RuntimeException(errorMsg));
-                }
-              });
-          return "downlaoderService.download";
-        });
-  }
-
-  /** The implementation of {@link ServiceConnection} that handles changes in the connection. */
-  @VisibleForTesting
-  static class DownloaderServiceConnection implements ServiceConnection {
-    private static final String TAG = "ModelDownloaderImpl.DownloaderServiceConnection";
-
-    private CallbackToFutureAdapter.Completer<IModelDownloaderService> completer;
-
-    public void attachCompleter(
-        CallbackToFutureAdapter.Completer<IModelDownloaderService> completer) {
-      this.completer = completer;
-    }
-
-    @Override
-    public void onServiceConnected(ComponentName componentName, IBinder iBinder) {
-      TcLog.d(TAG, "DownloaderService connected");
-      completer.set(Preconditions.checkNotNull(IModelDownloaderService.Stub.asInterface(iBinder)));
-    }
-
-    @Override
-    public void onServiceDisconnected(ComponentName componentName) {
-      // If this is invoked after onServiceConnected, it will be ignored by the completer.
-      completer.setException(new RuntimeException("Service disconnected"));
-    }
-
-    @Override
-    public void onBindingDied(ComponentName name) {
-      completer.setException(new RuntimeException("Binding died"));
-    }
-
-    @Override
-    public void onNullBinding(ComponentName name) {
-      completer.setException(new RuntimeException("Unable to bind to DownloaderService"));
-    }
-  }
-}
diff --git a/java/src/com/android/textclassifier/ModelDownloaderService.java b/java/src/com/android/textclassifier/ModelDownloaderService.java
deleted file mode 100644
index 6fe4ee9..0000000
--- a/java/src/com/android/textclassifier/ModelDownloaderService.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import android.app.Service;
-import android.content.Intent;
-import android.os.IBinder;
-import com.android.textclassifier.common.base.TcLog;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-/** Service to expose IModelDownloaderService. */
-public final class ModelDownloaderService extends Service {
-  private static final String TAG = "ModelDownloaderService";
-
-  private ExecutorService executorService;
-  private IBinder iBinder;
-
-  @Override
-  public void onCreate() {
-    super.onCreate();
-    // TODO(licha): Use a shared thread pool for IO intensive tasks
-    this.executorService = Executors.newSingleThreadExecutor();
-    this.iBinder = new ModelDownloaderServiceImpl(executorService);
-  }
-
-  @Override
-  public IBinder onBind(Intent intent) {
-    TcLog.d(TAG, "Binding to ModelDownloadService");
-    return iBinder;
-  }
-
-  @Override
-  public void onDestroy() {
-    TcLog.d(TAG, "Destroying ModelDownloadService");
-    executorService.shutdown();
-  }
-}
diff --git a/java/src/com/android/textclassifier/ModelDownloaderServiceImpl.java b/java/src/com/android/textclassifier/ModelDownloaderServiceImpl.java
deleted file mode 100644
index 497beca..0000000
--- a/java/src/com/android/textclassifier/ModelDownloaderServiceImpl.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import android.os.RemoteException;
-import com.android.textclassifier.common.base.TcLog;
-import com.google.android.downloader.AndroidDownloaderLogger;
-import com.google.android.downloader.ConnectivityHandler;
-import com.google.android.downloader.DownloadConstraints;
-import com.google.android.downloader.DownloadRequest;
-import com.google.android.downloader.DownloadResult;
-import com.google.android.downloader.Downloader;
-import com.google.android.downloader.PlatformUrlEngine;
-import com.google.android.downloader.SimpleFileDownloadDestination;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.FutureCallback;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.MoreExecutors;
-import java.io.File;
-import java.net.URI;
-import java.util.Collections;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import javax.annotation.concurrent.ThreadSafe;
-
-/** IModelDownloaderService implementation with Android Downloader library. */
-@ThreadSafe
-final class ModelDownloaderServiceImpl extends IModelDownloaderService.Stub {
-  private static final String TAG = "ModelDownloaderServiceImpl";
-
-  // Connectivity constraints will be checked by WorkManager instead.
-  private static class NoOpConnectivityHandler implements ConnectivityHandler {
-    @Override
-    public ListenableFuture<Void> checkConnectivity(DownloadConstraints constraints) {
-      return Futures.immediateVoidFuture();
-    }
-  }
-
-  private final ExecutorService bgExecutorService;
-  private final Downloader downloader;
-
-  public ModelDownloaderServiceImpl(ExecutorService bgExecutorService) {
-    this.bgExecutorService = bgExecutorService;
-    this.downloader =
-        new Downloader.Builder()
-            .withIOExecutor(bgExecutorService)
-            .withConnectivityHandler(new NoOpConnectivityHandler())
-            .addUrlEngine(
-                Collections.singleton("https"),
-                new PlatformUrlEngine(
-                    // TODO(licha): use a shared thread pool
-                    MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor()),
-                    /* connectTimeoutMs= */ 60 * 1000,
-                    /* readTimeoutMs= */ 60 * 1000))
-            .withLogger(new AndroidDownloaderLogger())
-            .build();
-  }
-
-  @VisibleForTesting
-  ModelDownloaderServiceImpl(ExecutorService bgExecutorService, Downloader downloader) {
-    this.bgExecutorService = Preconditions.checkNotNull(bgExecutorService);
-    this.downloader = Preconditions.checkNotNull(downloader);
-  }
-
-  @Override
-  public void download(String uri, String targetFilePath, IModelDownloaderCallback callback) {
-    TcLog.d(TAG, "Download request received: " + uri);
-    try {
-      Preconditions.checkArgument(
-          uri.startsWith(ModelDownloadManager.TEXT_CLASSIFIER_URL_PREFIX),
-          "Can only download TextClassifier resources, but uri is: %s",
-          uri);
-      File targetFile = new File(targetFilePath);
-      File tempMetadataFile = getMetadataFile(targetFile);
-      DownloadRequest request =
-          downloader
-              .newRequestBuilder(
-                  URI.create(uri), new SimpleFileDownloadDestination(targetFile, tempMetadataFile))
-              .build();
-      downloader
-          .execute(request)
-          .transform(DownloadResult::bytesWritten, MoreExecutors.directExecutor())
-          .addCallback(
-              new FutureCallback<Long>() {
-                @Override
-                public void onSuccess(Long bytesWritten) {
-                  tempMetadataFile.delete();
-                  dispatchOnSuccessSafely(callback, bytesWritten);
-                }
-
-                @Override
-                public void onFailure(Throwable t) {
-                  // TODO(licha): We may be able to resume the download if we keep those files
-                  targetFile.delete();
-                  tempMetadataFile.delete();
-                  dispatchOnFailureSafely(callback, t);
-                }
-              },
-              bgExecutorService);
-    } catch (Throwable t) {
-      dispatchOnFailureSafely(callback, t);
-    }
-  }
-
-  @VisibleForTesting
-  static File getMetadataFile(File targetFile) {
-    return new File(targetFile.getParentFile(), targetFile.getName() + ".metadata");
-  }
-
-  private static void dispatchOnSuccessSafely(
-      IModelDownloaderCallback callback, long bytesWritten) {
-    try {
-      callback.onSuccess(bytesWritten);
-    } catch (RemoteException e) {
-      TcLog.e(TAG, "Unable to notify successful download", e);
-    }
-  }
-
-  private static void dispatchOnFailureSafely(
-      IModelDownloaderCallback callback, Throwable throwable) {
-    try {
-      callback.onFailure(throwable.getMessage());
-    } catch (RemoteException e) {
-      TcLog.e(TAG, "Unable to notify failures in download", e);
-    }
-  }
-}
diff --git a/java/src/com/android/textclassifier/TextClassifierImpl.java b/java/src/com/android/textclassifier/TextClassifierImpl.java
index b824ed0..bf326fb 100644
--- a/java/src/com/android/textclassifier/TextClassifierImpl.java
+++ b/java/src/com/android/textclassifier/TextClassifierImpl.java
@@ -33,6 +33,8 @@
 import android.view.textclassifier.ConversationActions;
 import android.view.textclassifier.SelectionEvent;
 import android.view.textclassifier.TextClassification;
+import android.view.textclassifier.TextClassification.Request;
+import android.view.textclassifier.TextClassificationContext;
 import android.view.textclassifier.TextClassificationSessionId;
 import android.view.textclassifier.TextClassifier;
 import android.view.textclassifier.TextClassifierEvent;
@@ -42,8 +44,10 @@
 import androidx.annotation.GuardedBy;
 import androidx.annotation.WorkerThread;
 import androidx.core.util.Pair;
-import com.android.textclassifier.ModelFileManager.ModelFile;
-import com.android.textclassifier.ModelFileManager.ModelType;
+import com.android.textclassifier.common.ModelFileManager;
+import com.android.textclassifier.common.ModelFileManager.ModelFile;
+import com.android.textclassifier.common.ModelType;
+import com.android.textclassifier.common.TextClassifierSettings;
 import com.android.textclassifier.common.base.TcLog;
 import com.android.textclassifier.common.intent.LabeledIntent;
 import com.android.textclassifier.common.intent.TemplateIntentFactory;
@@ -56,6 +60,7 @@
 import com.android.textclassifier.common.statsd.TextClassifierEventLogger;
 import com.android.textclassifier.utils.IndentingPrintWriter;
 import com.google.android.textclassifier.ActionsSuggestionsModel;
+import com.google.android.textclassifier.ActionsSuggestionsModel.ActionSuggestions;
 import com.google.android.textclassifier.AnnotatorModel;
 import com.google.android.textclassifier.LangIdModel;
 import com.google.common.base.Optional;
@@ -125,7 +130,11 @@
   }
 
   @WorkerThread
-  TextSelection suggestSelection(TextSelection.Request request) throws IOException {
+  TextSelection suggestSelection(
+      @Nullable TextClassificationSessionId sessionId,
+      @Nullable TextClassificationContext textClassificationContext,
+      TextSelection.Request request)
+      throws IOException {
     Preconditions.checkNotNull(request);
     checkMainThread();
     final int rangeLength = request.getEndIndex() - request.getStartIndex();
@@ -184,7 +193,11 @@
   }
 
   @WorkerThread
-  TextClassification classifyText(TextClassification.Request request) throws IOException {
+  TextClassification classifyText(
+      @Nullable TextClassificationSessionId sessionId,
+      @Nullable TextClassificationContext textClassificationContext,
+      Request request)
+      throws IOException {
     Preconditions.checkNotNull(request);
     checkMainThread();
     LangIdModel langId = getLangIdImpl();
@@ -224,7 +237,11 @@
   }
 
   @WorkerThread
-  TextLinks generateLinks(TextLinks.Request request) throws IOException {
+  TextLinks generateLinks(
+      @Nullable TextClassificationSessionId sessionId,
+      @Nullable TextClassificationContext textClassificationContext,
+      TextLinks.Request request)
+      throws IOException {
     Preconditions.checkNotNull(request);
     Preconditions.checkArgument(
         request.getText().length() <= getMaxGenerateLinksTextLength(),
@@ -291,6 +308,8 @@
       langIdModelInfo = Optional.fromNullable(langIdModelInUse).transform(ModelFile::toModelInfo);
     }
     generateLinksLogger.logGenerateLinks(
+        sessionId,
+        textClassificationContext,
         request.getText(),
         links,
         callingPackageName,
@@ -319,7 +338,7 @@
     }
   }
 
-  void onSelectionEvent(SelectionEvent event) {
+  void onSelectionEvent(@Nullable TextClassificationSessionId sessionId, SelectionEvent event) {
     TextClassifierEvent textClassifierEvent = SelectionEventConverter.toTextClassifierEvent(event);
     if (textClassifierEvent == null) {
       return;
@@ -334,7 +353,11 @@
         TextClassifierEventConverter.fromPlatform(event));
   }
 
-  TextLanguage detectLanguage(TextLanguage.Request request) throws IOException {
+  TextLanguage detectLanguage(
+      @Nullable TextClassificationSessionId sessionId,
+      @Nullable TextClassificationContext textClassificationContext,
+      TextLanguage.Request request)
+      throws IOException {
     Preconditions.checkNotNull(request);
     checkMainThread();
     final TextLanguage.Builder builder = new TextLanguage.Builder();
@@ -347,7 +370,10 @@
     return builder.build();
   }
 
-  ConversationActions suggestConversationActions(ConversationActions.Request request)
+  ConversationActions suggestConversationActions(
+      @Nullable TextClassificationSessionId sessionId,
+      @Nullable TextClassificationContext textClassificationContext,
+      ConversationActions.Request request)
       throws IOException {
     Preconditions.checkNotNull(request);
     checkMainThread();
@@ -362,7 +388,7 @@
     ActionsSuggestionsModel.Conversation nativeConversation =
         new ActionsSuggestionsModel.Conversation(nativeMessages);
 
-    ActionsSuggestionsModel.ActionSuggestion[] nativeSuggestions =
+    ActionSuggestions nativeSuggestions =
         actionsImpl.suggestActionsWithIntents(
             nativeConversation,
             null,
@@ -379,11 +405,11 @@
    * non-null component name is in the extras.
    */
   private ConversationActions createConversationActionResult(
-      ConversationActions.Request request,
-      ActionsSuggestionsModel.ActionSuggestion[] nativeSuggestions) {
+      ConversationActions.Request request, ActionSuggestions nativeSuggestions) {
     Collection<String> expectedTypes = resolveActionTypesFromRequest(request);
     List<ConversationAction> conversationActions = new ArrayList<>();
-    for (ActionsSuggestionsModel.ActionSuggestion nativeSuggestion : nativeSuggestions) {
+    for (ActionsSuggestionsModel.ActionSuggestion nativeSuggestion :
+        nativeSuggestions.actionSuggestions) {
       String actionType = nativeSuggestion.getActionType();
       if (!expectedTypes.contains(actionType)) {
         continue;
@@ -648,6 +674,7 @@
 
       printWriter.println();
       settings.dump(printWriter);
+      printWriter.println();
     }
   }
 
@@ -664,7 +691,7 @@
 
   private static void checkMainThread() {
     if (Looper.myLooper() == Looper.getMainLooper()) {
-      TcLog.e(TAG, "TextClassifier called on main thread", new Exception());
+      TcLog.e(TAG, "TCS TextClassifier called on main thread", new Exception());
     }
   }
 
diff --git a/java/src/com/android/textclassifier/ModelFileManager.java b/java/src/com/android/textclassifier/common/ModelFileManager.java
similarity index 91%
rename from java/src/com/android/textclassifier/ModelFileManager.java
rename to java/src/com/android/textclassifier/common/ModelFileManager.java
index bdd3ae2..406a889 100644
--- a/java/src/com/android/textclassifier/ModelFileManager.java
+++ b/java/src/com/android/textclassifier/common/ModelFileManager.java
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package com.android.textclassifier;
+package com.android.textclassifier.common;
 
 import android.content.Context;
 import android.content.res.AssetFileDescriptor;
@@ -23,9 +23,8 @@
 import android.os.ParcelFileDescriptor;
 import android.util.ArraySet;
 import androidx.annotation.GuardedBy;
-import androidx.annotation.StringDef;
 import androidx.collection.ArrayMap;
-import com.android.textclassifier.ModelFileManager.ModelType.ModelTypeDef;
+import com.android.textclassifier.common.ModelType.ModelTypeDef;
 import com.android.textclassifier.common.base.TcLog;
 import com.android.textclassifier.common.logging.ResultIdUtils.ModelInfo;
 import com.android.textclassifier.utils.IndentingPrintWriter;
@@ -40,8 +39,6 @@
 import com.google.common.collect.ImmutableList;
 import java.io.File;
 import java.io.IOException;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Locale;
@@ -58,7 +55,7 @@
  * Manages all model files in storage. {@link TextClassifierImpl} depends on this class to get the
  * model files to load.
  */
-final class ModelFileManager {
+public final class ModelFileManager {
 
   private static final String TAG = "ModelFileManager";
 
@@ -128,7 +125,7 @@
   }
 
   @VisibleForTesting
-  ModelFileManager(Context context, List<ModelFileLister> modelFileListers) {
+  public ModelFileManager(Context context, List<ModelFileLister> modelFileListers) {
     this.modelDownloaderDir = new File(context.getFilesDir(), DOWNLOAD_SUB_DIR_NAME);
     this.modelFileListers = ImmutableList.copyOf(modelFileListers);
   }
@@ -377,25 +374,9 @@
     }
   }
 
-  /**
-   * Returns a {@link File} that represents the destination to download a model.
-   *
-   * <p>Each model file's name is uniquely formatted based on its unique remote manifest URL suffix.
-   *
-   * <p>{@link ModelDownloadManager} needs to call this to get the right location and file name.
-   *
-   * @param modelType the type of the model image to download
-   * @param manifestUrlSuffix the unique remote url suffix of the model manifest
-   */
-  public File getDownloadTargetFile(
-      @ModelType.ModelTypeDef String modelType, String manifestUrlSuffix) {
-    // TODO(licha): Consider preserving the folder hierarchy of the URL
-    String fileMidName = manifestUrlSuffix.replaceAll("[^A-Za-z0-9]", "_");
-    if (fileMidName.endsWith("_manifest")) {
-      fileMidName = fileMidName.substring(0, fileMidName.length() - "_manifest".length());
-    }
-    String fileName = String.format("%s.%s.model", modelType, fileMidName);
-    return new File(modelDownloaderDir, fileName);
+  /** Returns the directory containing models downloaded by the downloader. */
+  public File getModelDownloaderDir() {
+    return modelDownloaderDir;
   }
 
   /**
@@ -619,24 +600,4 @@
           .collect(Collectors.collectingAndThen(Collectors.toList(), ImmutableList::copyOf));
     }
   }
-
-  /** Effectively an enum class to represent types of models. */
-  public static final class ModelType {
-    @Retention(RetentionPolicy.SOURCE)
-    @StringDef({ANNOTATOR, LANG_ID, ACTIONS_SUGGESTIONS})
-    @interface ModelTypeDef {}
-
-    public static final String ANNOTATOR = "annotator";
-    public static final String LANG_ID = "lang_id";
-    public static final String ACTIONS_SUGGESTIONS = "actions_suggestions";
-
-    public static final ImmutableList<String> VALUES =
-        ImmutableList.of(ANNOTATOR, LANG_ID, ACTIONS_SUGGESTIONS);
-
-    public static ImmutableList<String> values() {
-      return VALUES;
-    }
-
-    private ModelType() {}
-  }
 }
diff --git a/java/src/com/android/textclassifier/common/ModelType.java b/java/src/com/android/textclassifier/common/ModelType.java
new file mode 100644
index 0000000..a30fce0
--- /dev/null
+++ b/java/src/com/android/textclassifier/common/ModelType.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.textclassifier.common;
+
+import androidx.annotation.StringDef;
+import com.google.common.collect.ImmutableList;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/** Effectively an enum class to represent types of models. */
+public final class ModelType {
+  /** TextClassifier model types as String. */
+  @Retention(RetentionPolicy.SOURCE)
+  @StringDef({ANNOTATOR, LANG_ID, ACTIONS_SUGGESTIONS})
+  public @interface ModelTypeDef {}
+
+  public static final String ANNOTATOR = "annotator";
+  public static final String LANG_ID = "lang_id";
+  public static final String ACTIONS_SUGGESTIONS = "actions_suggestions";
+
+  public static final ImmutableList<String> VALUES =
+      ImmutableList.of(ANNOTATOR, LANG_ID, ACTIONS_SUGGESTIONS);
+
+  public static ImmutableList<String> values() {
+    return VALUES;
+  }
+
+  private ModelType() {}
+}
diff --git a/java/src/com/android/textclassifier/common/TextClassifierServiceExecutors.java b/java/src/com/android/textclassifier/common/TextClassifierServiceExecutors.java
new file mode 100644
index 0000000..43164e0
--- /dev/null
+++ b/java/src/com/android/textclassifier/common/TextClassifierServiceExecutors.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.textclassifier.common;
+
+import com.android.textclassifier.common.base.TcLog;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import java.util.concurrent.Executors;
+
+// TODO(licha): Revisit the configurations of thread pools
+/**
+ * Holder of executor singletons.
+ *
+ * <p>Note: because we have two processes, we may keep two copies of the executors in this class.
+ */
+public final class TextClassifierServiceExecutors {
+  private static final String TAG = "TextClassifierServiceExecutors";
+
+  /** Returns an executor with normal priority. Used for handling client requests. */
+  public static ListeningExecutorService getNormhPriorityExecutor() {
+    return NormPriorityExecutorHolder.normPriorityExecutor;
+  }
+
+  /** Returns a single-thread executor with low priority. Used for internal tasks like logging. */
+  public static ListeningExecutorService getLowPriorityExecutor() {
+    return LowPriorityExecutorHolder.lowPriorityExecutor;
+  }
+
+  private static class NormPriorityExecutorHolder {
+    static final ListeningExecutorService normPriorityExecutor =
+        init("tcs-norm-prio-executor-%d", Thread.NORM_PRIORITY, /* corePoolSize= */ 2);
+  }
+
+  private static class LowPriorityExecutorHolder {
+    static final ListeningExecutorService lowPriorityExecutor =
+        init("tcs-low-prio-executor-%d", Thread.NORM_PRIORITY - 1, /* corePoolSize= */ 1);
+  }
+
+  private static ListeningExecutorService init(String nameFormat, int priority, int corePoolSize) {
+    TcLog.v(TAG, "Creating executor: " + nameFormat);
+    return MoreExecutors.listeningDecorator(
+        Executors.newFixedThreadPool(
+            corePoolSize,
+            new ThreadFactoryBuilder()
+                .setNameFormat(nameFormat)
+                .setPriority(priority)
+                // In Android, those uncaught exceptions will crash the whole process if not handled
+                .setUncaughtExceptionHandler(
+                    (thread, throwable) ->
+                        TcLog.e(TAG, "Exception from executor: " + thread, throwable))
+                .build()));
+  }
+
+  private TextClassifierServiceExecutors() {}
+}
diff --git a/java/src/com/android/textclassifier/TextClassifierSettings.java b/java/src/com/android/textclassifier/common/TextClassifierSettings.java
similarity index 85%
rename from java/src/com/android/textclassifier/TextClassifierSettings.java
rename to java/src/com/android/textclassifier/common/TextClassifierSettings.java
index e4ad140..fdf259e 100644
--- a/java/src/com/android/textclassifier/TextClassifierSettings.java
+++ b/java/src/com/android/textclassifier/common/TextClassifierSettings.java
@@ -14,16 +14,15 @@
  * limitations under the License.
  */
 
-package com.android.textclassifier;
+package com.android.textclassifier.common;
+
+import static java.util.concurrent.TimeUnit.HOURS;
 
 import android.provider.DeviceConfig;
 import android.provider.DeviceConfig.Properties;
 import android.view.textclassifier.ConversationAction;
 import android.view.textclassifier.TextClassifier;
 import androidx.annotation.NonNull;
-import androidx.work.NetworkType;
-import com.android.textclassifier.ModelFileManager.ModelType;
-import com.android.textclassifier.common.base.TcLog;
 import com.android.textclassifier.utils.IndentingPrintWriter;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Splitter;
@@ -112,18 +111,25 @@
 
   /** Whether to enable model downloading with ModelDownloadManager */
   @VisibleForTesting
-  static final String MODEL_DOWNLOAD_MANAGER_ENABLED = "model_download_manager_enabled";
+  public static final String MODEL_DOWNLOAD_MANAGER_ENABLED = "model_download_manager_enabled";
   /** Type of network to download model manifest. A String value of androidx.work.NetworkType. */
   private static final String MANIFEST_DOWNLOAD_REQUIRED_NETWORK_TYPE =
       "manifest_download_required_network_type";
   /** Max attempts allowed for a single ModelDownloader downloading task. */
   @VisibleForTesting
   static final String MODEL_DOWNLOAD_MAX_ATTEMPTS = "model_download_max_attempts";
-  /** Flag name for url suffix is dynamically formatted based on model type and model language. */
-  @VisibleForTesting static final String MANIFEST_URL_SUFFIX_TEMPLATE = "url_suffix_%s_%s";
+
+  @VisibleForTesting
+  static final String MODEL_DOWNLOAD_BACKOFF_DELAY_IN_MILLIS =
+      "model_download_backoff_delay_in_millis";
+  /** Flag name for the manifest URL; dynamically formatted from the model type and language. */
+  @VisibleForTesting public static final String MANIFEST_URL_TEMPLATE = "manifest_url_%s_%s";
   /** Sampling rate for TextClassifier API logging. */
   static final String TEXTCLASSIFIER_API_LOG_SAMPLE_RATE = "textclassifier_api_log_sample_rate";
 
+  /** The size of the cache of the mapping of session id to text classification context. */
+  private static final String SESSION_ID_TO_CONTEXT_CACHE_SIZE = "session_id_to_context_cache_size";
+
   /**
    * A colon(:) separated string that specifies the configuration to use when including surrounding
    * context text in language detection queries.
@@ -188,10 +194,10 @@
   private static final boolean TRANSLATE_IN_CLASSIFICATION_ENABLED_DEFAULT = true;
   private static final boolean DETECT_LANGUAGES_FROM_TEXT_ENABLED_DEFAULT = true;
   private static final boolean MODEL_DOWNLOAD_MANAGER_ENABLED_DEFAULT = false;
-  // Manifest files are usually small, default to any network type
-  private static final String MANIFEST_DOWNLOAD_REQUIRED_NETWORK_TYPE_DEFAULT = "NOT_ROAMING";
+  private static final String MANIFEST_DOWNLOAD_REQUIRED_NETWORK_TYPE_DEFAULT = "UNMETERED";
   private static final int MODEL_DOWNLOAD_MAX_ATTEMPTS_DEFAULT = 5;
-  private static final String MANIFEST_URL_SUFFIX_DEFAULT = "";
+  private static final long MODEL_DOWNLOAD_BACKOFF_DELAY_IN_MILLIS_DEFAULT = HOURS.toMillis(1);
+  private static final String MANIFEST_URL_DEFAULT = "";
   private static final float[] LANG_ID_CONTEXT_SETTINGS_DEFAULT = new float[] {20f, 1.0f, 0.4f};
   /**
    * Sampling rate for API logging. For example, 100 means there is a 0.01 chance that the API call
@@ -199,7 +205,12 @@
    */
   private static final int TEXTCLASSIFIER_API_LOG_SAMPLE_RATE_DEFAULT = 10;
 
-  interface IDeviceConfig {
+  private static final int SESSION_ID_TO_CONTEXT_CACHE_SIZE_DEFAULT = 10;
+
+  // TODO(licha): Consider removing this. We can use real device config for testing.
+  /** DeviceConfig interface to facilitate testing. */
+  @VisibleForTesting
+  public interface IDeviceConfig {
     default Properties getProperties(@NonNull String namespace, @NonNull String... names) {
       return new Properties.Builder(namespace).build();
     }
@@ -208,6 +219,11 @@
       return defaultValue;
     }
 
+    default long getLong(
+        @NonNull String namespace, @NonNull String name, @NonNull long defaultValue) {
+      return defaultValue;
+    }
+
     default float getFloat(
         @NonNull String namespace, @NonNull String name, @NonNull float defaultValue) {
       return defaultValue;
@@ -238,6 +254,12 @@
         }
 
         @Override
+        public long getLong(
+            @NonNull String namespace, @NonNull String name, @NonNull long defaultValue) {
+          return DeviceConfig.getLong(namespace, name, defaultValue);
+        }
+
+        @Override
         public float getFloat(
             @NonNull String namespace, @NonNull String name, @NonNull float defaultValue) {
           return DeviceConfig.getFloat(namespace, name, defaultValue);
@@ -350,19 +372,12 @@
         NAMESPACE, MODEL_DOWNLOAD_MANAGER_ENABLED, MODEL_DOWNLOAD_MANAGER_ENABLED_DEFAULT);
   }
 
-  public NetworkType getManifestDownloadRequiredNetworkType() {
-    String networkType =
-        deviceConfig.getString(
-            NAMESPACE,
-            MANIFEST_DOWNLOAD_REQUIRED_NETWORK_TYPE,
-            MANIFEST_DOWNLOAD_REQUIRED_NETWORK_TYPE_DEFAULT);
-    try {
-      return NetworkType.valueOf(networkType);
-    } catch (IllegalArgumentException e) {
-      // In case the flag is not a valid enum value
-      TcLog.w(TAG, "Invalid manifest download required NetworkType: " + networkType);
-      return NetworkType.valueOf(MANIFEST_DOWNLOAD_REQUIRED_NETWORK_TYPE_DEFAULT);
-    }
+  /** Returns a string which represents an androidx.work.NetworkType enum. */
+  public String getManifestDownloadRequiredNetworkType() {
+    return deviceConfig.getString(
+        NAMESPACE,
+        MANIFEST_DOWNLOAD_REQUIRED_NETWORK_TYPE,
+        MANIFEST_DOWNLOAD_REQUIRED_NETWORK_TYPE_DEFAULT);
   }
 
   public int getModelDownloadMaxAttempts() {
@@ -370,19 +385,25 @@
         NAMESPACE, MODEL_DOWNLOAD_MAX_ATTEMPTS, MODEL_DOWNLOAD_MAX_ATTEMPTS_DEFAULT);
   }
 
+  public long getModelDownloadBackoffDelayInMillis() {
+    return deviceConfig.getLong(
+        NAMESPACE,
+        MODEL_DOWNLOAD_BACKOFF_DELAY_IN_MILLIS,
+        MODEL_DOWNLOAD_BACKOFF_DELAY_IN_MILLIS_DEFAULT);
+  }
+
   /**
-   * Get model's manifest url suffix for given model type and language.
+   * Get model's manifest url for given model type and language.
    *
    * @param modelType the type of model for the target url
    * @param modelLanguageTag the language tag for the model (e.g. en), but can also be "universal"
-   * @return DeviceConfig configured url suffix or empty string if not set
+   * @return DeviceConfig configured url or empty string if not set
    */
-  public String getManifestURLSuffix(
-      @ModelType.ModelTypeDef String modelType, String modelLanguageTag) {
-    // Example: annotator_zh_url_suffix, lang_id_universal_url_suffix
-    String urlSuffixFlagName =
-        String.format(MANIFEST_URL_SUFFIX_TEMPLATE, modelType, modelLanguageTag);
-    return deviceConfig.getString(NAMESPACE, urlSuffixFlagName, MANIFEST_URL_SUFFIX_DEFAULT);
+  public String getManifestURL(@ModelType.ModelTypeDef String modelType, String modelLanguageTag) {
+    // E.g: manifest_url_annotator_zh, manifest_url_lang_id_universal,
+    // manifest_url_actions_suggestions_en
+    String urlFlagName = String.format(MANIFEST_URL_TEMPLATE, modelType, modelLanguageTag);
+    return deviceConfig.getString(NAMESPACE, urlFlagName, MANIFEST_URL_DEFAULT);
   }
 
   /**
@@ -392,16 +413,15 @@
    * is no easy way to hardcode the list in client. Therefore, we parse all configured flag's name
    * in DeviceConfig, and let the client to choose the best variant to download.
    */
-  public ImmutableList<String> getLanguageTagsForManifestURLSuffix(
+  public ImmutableList<String> getLanguageTagsForManifestURL(
       @ModelType.ModelTypeDef String modelType) {
-    String urlSuffixFlagBaseName =
-        String.format(MANIFEST_URL_SUFFIX_TEMPLATE, modelType, /* language */ "");
+    String urlFlagBaseName = String.format(MANIFEST_URL_TEMPLATE, modelType, /* language */ "");
     Properties properties = deviceConfig.getProperties(NAMESPACE);
     ImmutableList.Builder<String> variantsBuilder = ImmutableList.builder();
     for (String name : properties.getKeyset()) {
-      if (name.startsWith(urlSuffixFlagBaseName)
+      if (name.startsWith(urlFlagBaseName)
           && properties.getString(name, /* defaultValue= */ null) != null) {
-        variantsBuilder.add(name.substring(urlSuffixFlagBaseName.length()));
+        variantsBuilder.add(name.substring(urlFlagBaseName.length()));
       }
     }
     return variantsBuilder.build();
@@ -412,7 +432,12 @@
         NAMESPACE, TEXTCLASSIFIER_API_LOG_SAMPLE_RATE, TEXTCLASSIFIER_API_LOG_SAMPLE_RATE_DEFAULT);
   }
 
-  void dump(IndentingPrintWriter pw) {
+  public int getSessionIdToContextCacheSize() {
+    return deviceConfig.getInt(
+        NAMESPACE, SESSION_ID_TO_CONTEXT_CACHE_SIZE, SESSION_ID_TO_CONTEXT_CACHE_SIZE_DEFAULT);
+  }
+
+  public void dump(IndentingPrintWriter pw) {
     pw.println("TextClassifierSettings:");
     pw.increaseIndent();
     pw.printPair(CLASSIFY_TEXT_MAX_RANGE_LENGTH, getClassifyTextMaxRangeLength());
@@ -436,6 +461,7 @@
     pw.printPair(MODEL_DOWNLOAD_MAX_ATTEMPTS, getModelDownloadMaxAttempts());
     pw.decreaseIndent();
     pw.printPair(TEXTCLASSIFIER_API_LOG_SAMPLE_RATE, getTextClassifierApiLogSampleRate());
+    pw.printPair(SESSION_ID_TO_CONTEXT_CACHE_SIZE, getSessionIdToContextCacheSize());
     pw.decreaseIndent();
   }
 
diff --git a/java/src/com/android/textclassifier/common/statsd/GenerateLinksLogger.java b/java/src/com/android/textclassifier/common/statsd/GenerateLinksLogger.java
index 822eb77..df63f2f 100644
--- a/java/src/com/android/textclassifier/common/statsd/GenerateLinksLogger.java
+++ b/java/src/com/android/textclassifier/common/statsd/GenerateLinksLogger.java
@@ -16,21 +16,20 @@
 
 package com.android.textclassifier.common.statsd;
 
+import android.view.textclassifier.TextClassificationContext;
+import android.view.textclassifier.TextClassificationSessionId;
 import android.view.textclassifier.TextClassifier;
 import android.view.textclassifier.TextLinks;
 import androidx.collection.ArrayMap;
 import com.android.textclassifier.common.base.TcLog;
 import com.android.textclassifier.common.logging.ResultIdUtils.ModelInfo;
 import com.android.textclassifier.common.logging.TextClassifierEvent;
-import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
-import java.util.UUID;
-import java.util.function.Supplier;
 import javax.annotation.Nullable;
 
 /** A helper for logging calls to generateLinks. */
@@ -40,7 +39,6 @@
 
   private final Random random;
   private final int sampleRate;
-  private final Supplier<String> randomUuidSupplier;
 
   /**
    * @param sampleRate the rate at which log events are written. (e.g. 100 means there is a 0.01
@@ -48,24 +46,14 @@
    *     events, pass 1.
    */
   public GenerateLinksLogger(int sampleRate) {
-    this(sampleRate, () -> UUID.randomUUID().toString());
-  }
-
-  /**
-   * @param sampleRate the rate at which log events are written. (e.g. 100 means there is a 0.01
-   *     chance that a call to logGenerateLinks results in an event being written). To write all
-   *     events, pass 1.
-   * @param randomUuidSupplier supplies random UUIDs.
-   */
-  @VisibleForTesting
-  GenerateLinksLogger(int sampleRate, Supplier<String> randomUuidSupplier) {
     this.sampleRate = sampleRate;
     random = new Random();
-    this.randomUuidSupplier = Preconditions.checkNotNull(randomUuidSupplier);
   }
 
   /** Logs statistics about a call to generateLinks. */
   public void logGenerateLinks(
+      @Nullable TextClassificationSessionId sessionId,
+      @Nullable TextClassificationContext textClassificationContext,
       CharSequence text,
       TextLinks links,
       String callingPackageName,
@@ -95,20 +83,33 @@
       totalStats.countLink(link);
       perEntityTypeStats.computeIfAbsent(entityType, k -> new LinkifyStats()).countLink(link);
     }
+    int widgetType = TextClassifierStatsLog.TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_UNKNOWN;
+    if (textClassificationContext != null) {
+      widgetType = WidgetTypeConverter.toLoggingValue(textClassificationContext.getWidgetType());
+    }
 
-    final String callId = randomUuidSupplier.get();
+    final String sessionIdStr = sessionId == null ? null : sessionId.getValue();
     writeStats(
-        callId, callingPackageName, null, totalStats, text, latencyMs, annotatorModel, langIdModel);
+        sessionIdStr,
+        callingPackageName,
+        null,
+        totalStats,
+        text,
+        widgetType,
+        latencyMs,
+        annotatorModel,
+        langIdModel);
     // Sort the entity types to ensure the logging order is deterministic.
     ImmutableList<String> sortedEntityTypes =
         ImmutableList.sortedCopyOf(perEntityTypeStats.keySet());
     for (String entityType : sortedEntityTypes) {
       writeStats(
-          callId,
+          sessionIdStr,
           callingPackageName,
           entityType,
           perEntityTypeStats.get(entityType),
           text,
+          widgetType,
           latencyMs,
           annotatorModel,
           langIdModel);
@@ -130,11 +131,12 @@
 
   /** Writes a log event for the given stats. */
   private static void writeStats(
-      String callId,
+      @Nullable String sessionId,
       String callingPackageName,
       @Nullable String entityType,
       LinkifyStats stats,
       CharSequence text,
+      int widgetType,
       long latencyMs,
       Optional<ModelInfo> annotatorModel,
       Optional<ModelInfo> langIdModel) {
@@ -142,10 +144,10 @@
     String langIdModelName = langIdModel.transform(ModelInfo::toModelName).or("");
     TextClassifierStatsLog.write(
         TextClassifierStatsLog.TEXT_LINKIFY_EVENT,
-        callId,
+        sessionId,
         TextClassifierEvent.TYPE_LINKS_GENERATED,
         annotatorModelName,
-        TextClassifierEventLogger.WidgetType.WIDGET_TYPE_UNKNOWN,
+        widgetType,
         /* eventIndex */ 0,
         entityType,
         stats.numLinks,
@@ -161,7 +163,7 @@
           String.format(
               Locale.US,
               "%s:%s %d links (%d/%d chars) %dms %s annotator=%s langid=%s",
-              callId,
+              sessionId,
               entityType,
               stats.numLinks,
               stats.numLinksTextLength,
diff --git a/java/src/com/android/textclassifier/common/statsd/TextClassifierEventLogger.java b/java/src/com/android/textclassifier/common/statsd/TextClassifierEventLogger.java
index 6678142..06ad44f 100644
--- a/java/src/com/android/textclassifier/common/statsd/TextClassifierEventLogger.java
+++ b/java/src/com/android/textclassifier/common/statsd/TextClassifierEventLogger.java
@@ -19,7 +19,6 @@
 import static com.google.common.base.Charsets.UTF_8;
 import static com.google.common.base.Strings.nullToEmpty;
 
-import android.view.textclassifier.TextClassifier;
 import com.android.textclassifier.common.base.TcLog;
 import com.android.textclassifier.common.logging.ResultIdUtils;
 import com.android.textclassifier.common.logging.TextClassificationContext;
@@ -195,6 +194,14 @@
     return ResultIdUtils.getModelNames(event.getResultId());
   }
 
+  private static int getWidgetType(TextClassifierEvent event) {
+    TextClassificationContext eventContext = event.getEventContext();
+    if (eventContext == null) {
+      return TextClassifierStatsLog.TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_UNKNOWN;
+    }
+    return WidgetTypeConverter.toLoggingValue(eventContext.getWidgetType());
+  }
+
   @Nullable
   private static String getPackageName(TextClassifierEvent event) {
     TextClassificationContext eventContext = event.getEventContext();
@@ -203,52 +210,4 @@
     }
     return eventContext.getPackageName();
   }
-
-  private static int getWidgetType(TextClassifierEvent event) {
-    TextClassificationContext eventContext = event.getEventContext();
-    if (eventContext == null) {
-      return WidgetType.WIDGET_TYPE_UNKNOWN;
-    }
-    switch (eventContext.getWidgetType()) {
-      case TextClassifier.WIDGET_TYPE_UNKNOWN:
-        return WidgetType.WIDGET_TYPE_UNKNOWN;
-      case TextClassifier.WIDGET_TYPE_TEXTVIEW:
-        return WidgetType.WIDGET_TYPE_TEXTVIEW;
-      case TextClassifier.WIDGET_TYPE_EDITTEXT:
-        return WidgetType.WIDGET_TYPE_EDITTEXT;
-      case TextClassifier.WIDGET_TYPE_UNSELECTABLE_TEXTVIEW:
-        return WidgetType.WIDGET_TYPE_UNSELECTABLE_TEXTVIEW;
-      case TextClassifier.WIDGET_TYPE_WEBVIEW:
-        return WidgetType.WIDGET_TYPE_WEBVIEW;
-      case TextClassifier.WIDGET_TYPE_EDIT_WEBVIEW:
-        return WidgetType.WIDGET_TYPE_EDIT_WEBVIEW;
-      case TextClassifier.WIDGET_TYPE_CUSTOM_TEXTVIEW:
-        return WidgetType.WIDGET_TYPE_CUSTOM_TEXTVIEW;
-      case TextClassifier.WIDGET_TYPE_CUSTOM_EDITTEXT:
-        return WidgetType.WIDGET_TYPE_CUSTOM_EDITTEXT;
-      case TextClassifier.WIDGET_TYPE_CUSTOM_UNSELECTABLE_TEXTVIEW:
-        return WidgetType.WIDGET_TYPE_CUSTOM_UNSELECTABLE_TEXTVIEW;
-      case TextClassifier.WIDGET_TYPE_NOTIFICATION:
-        return WidgetType.WIDGET_TYPE_NOTIFICATION;
-      default: // fall out
-    }
-    return WidgetType.WIDGET_TYPE_UNKNOWN;
-  }
-
-  /** Widget type constants for logging. */
-  public static final class WidgetType {
-    // Sync these constants with textclassifier_enums.proto.
-    public static final int WIDGET_TYPE_UNKNOWN = 0;
-    public static final int WIDGET_TYPE_TEXTVIEW = 1;
-    public static final int WIDGET_TYPE_EDITTEXT = 2;
-    public static final int WIDGET_TYPE_UNSELECTABLE_TEXTVIEW = 3;
-    public static final int WIDGET_TYPE_WEBVIEW = 4;
-    public static final int WIDGET_TYPE_EDIT_WEBVIEW = 5;
-    public static final int WIDGET_TYPE_CUSTOM_TEXTVIEW = 6;
-    public static final int WIDGET_TYPE_CUSTOM_EDITTEXT = 7;
-    public static final int WIDGET_TYPE_CUSTOM_UNSELECTABLE_TEXTVIEW = 8;
-    public static final int WIDGET_TYPE_NOTIFICATION = 9;
-
-    private WidgetType() {}
-  }
 }
diff --git a/java/src/com/android/textclassifier/common/statsd/WidgetTypeConverter.java b/java/src/com/android/textclassifier/common/statsd/WidgetTypeConverter.java
new file mode 100644
index 0000000..13c04d1
--- /dev/null
+++ b/java/src/com/android/textclassifier/common/statsd/WidgetTypeConverter.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.textclassifier.common.statsd;
+
+import android.view.textclassifier.TextClassifier;
+
+/** Converts TextClassifier's WidgetTypes to enum values that are logged to server. */
+final class WidgetTypeConverter {
+  public static int toLoggingValue(String widgetType) {
+    switch (widgetType) {
+      case TextClassifier.WIDGET_TYPE_UNKNOWN:
+        return TextClassifierStatsLog.TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_UNKNOWN;
+      case TextClassifier.WIDGET_TYPE_TEXTVIEW:
+        return TextClassifierStatsLog.TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_TEXTVIEW;
+      case TextClassifier.WIDGET_TYPE_EDITTEXT:
+        return TextClassifierStatsLog.TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_EDITTEXT;
+      case TextClassifier.WIDGET_TYPE_UNSELECTABLE_TEXTVIEW:
+        return TextClassifierStatsLog
+            .TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_UNSELECTABLE_TEXTVIEW;
+      case TextClassifier.WIDGET_TYPE_WEBVIEW:
+        return TextClassifierStatsLog.TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_WEBVIEW;
+      case TextClassifier.WIDGET_TYPE_EDIT_WEBVIEW:
+        return TextClassifierStatsLog.TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_EDIT_WEBVIEW;
+      case TextClassifier.WIDGET_TYPE_CUSTOM_TEXTVIEW:
+        return TextClassifierStatsLog
+            .TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_CUSTOM_TEXTVIEW;
+      case TextClassifier.WIDGET_TYPE_CUSTOM_EDITTEXT:
+        return TextClassifierStatsLog
+            .TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_CUSTOM_EDITTEXT;
+      case TextClassifier.WIDGET_TYPE_CUSTOM_UNSELECTABLE_TEXTVIEW:
+        return TextClassifierStatsLog
+            .TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_CUSTOM_UNSELECTABLE_TEXTVIEW;
+      case TextClassifier.WIDGET_TYPE_NOTIFICATION:
+        return TextClassifierStatsLog.TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_NOTIFICATION;
+      case "clipboard": // TODO(tonymak) Replace it with WIDGET_TYPE_CLIPBOARD once S SDK is dropped
+        return TextClassifierStatsLog.TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_CLIPBOARD;
+      default: // fall out
+    }
+    return TextClassifierStatsLog.TEXT_SELECTION_EVENT__WIDGET_TYPE__WIDGET_TYPE_UNKNOWN;
+  }
+
+  private WidgetTypeConverter() {}
+}
diff --git a/java/tests/instrumentation/Android.bp b/java/tests/instrumentation/Android.bp
index cf9a7b0..74261c1 100644
--- a/java/tests/instrumentation/Android.bp
+++ b/java/tests/instrumentation/Android.bp
@@ -45,8 +45,7 @@
         "TextClassifierServiceLib",
         "statsdprotolite",
         "textclassifierprotoslite",
-        "TextClassifierCoverageLib",
-        "androidx.work_work-testing",
+        "TextClassifierCoverageLib"
     ],
 
     jni_libs: [
@@ -55,7 +54,7 @@
     ],
 
     test_suites: [
-        "device-tests", "mts-extservices"
+        "general-tests", "mts-extservices"
     ],
 
     plugins: ["androidx.room_room-compiler-plugin",],
diff --git a/java/tests/instrumentation/AndroidManifest.xml b/java/tests/instrumentation/AndroidManifest.xml
index e8cf968..3ee30da 100644
--- a/java/tests/instrumentation/AndroidManifest.xml
+++ b/java/tests/instrumentation/AndroidManifest.xml
@@ -8,10 +8,6 @@
 
   <application>
     <uses-library android:name="android.test.runner"/>
-    <service
-        android:exported="false"
-        android:name="com.android.textclassifier.TestModelDownloaderService">
-    </service>
   </application>
 
   <instrumentation
diff --git a/java/tests/instrumentation/AndroidTest.xml b/java/tests/instrumentation/AndroidTest.xml
index 48a3f09..6c47a1a 100644
--- a/java/tests/instrumentation/AndroidTest.xml
+++ b/java/tests/instrumentation/AndroidTest.xml
@@ -13,8 +13,8 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<!-- This test config file is auto-generated. -->
 <configuration description="Runs TextClassifierServiceTest.">
+    <option name="config-descriptor:metadata" key="mainline-param" value="com.google.android.extservices.apex" />
     <option name="test-suite-tag" value="apct" />
     <option name="test-suite-tag" value="apct-instrumentation" />
     <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/AbstractDownloadWorkerTest.java b/java/tests/instrumentation/src/com/android/textclassifier/AbstractDownloadWorkerTest.java
deleted file mode 100644
index fe3b853..0000000
--- a/java/tests/instrumentation/src/com/android/textclassifier/AbstractDownloadWorkerTest.java
+++ /dev/null
@@ -1,249 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import static com.google.common.truth.Truth.assertThat;
-
-import android.content.Context;
-import androidx.test.core.app.ApplicationProvider;
-import androidx.work.Data;
-import androidx.work.ListenableWorker;
-import androidx.work.WorkerFactory;
-import androidx.work.WorkerParameters;
-import androidx.work.testing.TestListenableWorkerBuilder;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.MoreExecutors;
-import java.io.File;
-import java.io.FileWriter;
-import java.net.URI;
-import java.util.concurrent.ExecutorService;
-import java.util.function.Function;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-@RunWith(JUnit4.class)
-public final class AbstractDownloadWorkerTest {
-  private static final String URL = "http://www.gstatic.com/android/text_classifier/q/v711/en.fb";
-  private static final String CONTENT_BYTES = "abc";
-  private static final int WORKER_MAX_DOWNLOAD_ATTEMPTS = 5;
-  private static final Function<File, Void> NO_OP_HANDLE_FUNC = f -> null;
-
-  private File targetModelFile;
-
-  @Before
-  public void setUp() {
-    this.targetModelFile =
-        new File(ApplicationProvider.getApplicationContext().getCacheDir(), "model.fb");
-    targetModelFile.deleteOnExit();
-  }
-
-  @Test
-  public void download_succeeded() throws Exception {
-    AbstractDownloadWorker worker =
-        createWorker(
-            createData(/* reuseExistingFile= */ false),
-            /* runAttemptCount= */ 0,
-            TestModelDownloader.withSuccess(CONTENT_BYTES),
-            NO_OP_HANDLE_FUNC);
-    targetModelFile.delete();
-
-    assertThat(targetModelFile.exists()).isFalse();
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.success());
-    assertThat(targetModelFile.exists()).isTrue();
-  }
-
-  @Test
-  public void download_reuseExistingFile() throws Exception {
-    AbstractDownloadWorker worker =
-        createWorker(
-            createData(/* reuseExistingFile= */ true),
-            /* runAttemptCount= */ 0,
-            // If we reuse existing file, downloader will not be invoked, thus won't fail
-            TestModelDownloader.withFailure(new Exception()),
-            NO_OP_HANDLE_FUNC);
-    targetModelFile.createNewFile();
-
-    assertThat(targetModelFile.exists()).isTrue();
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.success());
-    assertThat(targetModelFile.exists()).isTrue();
-  }
-
-  @Test
-  public void download_reuseExistingFileButNotExist() throws Exception {
-    AbstractDownloadWorker worker =
-        createWorker(
-            createData(/* reuseExistingFile= */ true),
-            /* runAttemptCount= */ 0,
-            TestModelDownloader.withSuccess(CONTENT_BYTES),
-            NO_OP_HANDLE_FUNC);
-    targetModelFile.delete();
-
-    assertThat(targetModelFile.exists()).isFalse();
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.success());
-    assertThat(targetModelFile.exists()).isTrue();
-  }
-
-  @Test
-  public void download_reuseExistingFileButNotExistAndFails() throws Exception {
-    AbstractDownloadWorker worker =
-        createWorker(
-            createData(/* reuseExistingFile= */ true),
-            /* runAttemptCount= */ 0,
-            TestModelDownloader.withFailure(new Exception()),
-            NO_OP_HANDLE_FUNC);
-    targetModelFile.delete();
-
-    assertThat(targetModelFile.exists()).isFalse();
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.retry());
-    assertThat(targetModelFile.exists()).isFalse();
-  }
-
-  @Test
-  public void download_failedAndRetry() throws Exception {
-    AbstractDownloadWorker worker =
-        createWorker(
-            createData(/* reuseExistingFile= */ false),
-            /* runAttemptCount= */ 0,
-            TestModelDownloader.withFailure(new Exception()),
-            NO_OP_HANDLE_FUNC);
-    targetModelFile.delete();
-
-    assertThat(targetModelFile.exists()).isFalse();
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.retry());
-    assertThat(targetModelFile.exists()).isFalse();
-  }
-
-  @Test
-  public void download_failedTooManyAttempts() throws Exception {
-    AbstractDownloadWorker worker =
-        createWorker(
-            createData(/* reuseExistingFile= */ false),
-            WORKER_MAX_DOWNLOAD_ATTEMPTS,
-            TestModelDownloader.withSuccess(CONTENT_BYTES),
-            NO_OP_HANDLE_FUNC);
-    targetModelFile.delete();
-
-    assertThat(targetModelFile.exists()).isFalse();
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.failure());
-    assertThat(targetModelFile.exists()).isFalse();
-  }
-
-  @Test
-  public void download_errorWhenHandlingDownloadedFile() throws Exception {
-    AbstractDownloadWorker worker =
-        createWorker(
-            createData(/* reuseExistingFile= */ false),
-            /* runAttemptCount= */ 0,
-            TestModelDownloader.withSuccess(""),
-            file -> {
-              throw new RuntimeException();
-            });
-    targetModelFile.delete();
-
-    assertThat(targetModelFile.exists()).isFalse();
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.retry());
-    // Downlaoded file should be cleaned up if hanlding function fails
-    assertThat(targetModelFile.exists()).isFalse();
-  }
-
-  private Data createData(boolean reuseExistingFile) {
-    return AbstractDownloadWorker.createInputDataBuilder(
-            URL, targetModelFile.getAbsolutePath(), reuseExistingFile, WORKER_MAX_DOWNLOAD_ATTEMPTS)
-        .build();
-  }
-
-  private static AbstractDownloadWorker createWorker(
-      Data data, int runAttemptCount, ModelDownloader downloader, Function<File, Void> handleFunc) {
-    return TestListenableWorkerBuilder.from(
-            ApplicationProvider.getApplicationContext(), TestDownloadWorker.class)
-        .setInputData(data)
-        .setRunAttemptCount(runAttemptCount)
-        .setWorkerFactory(
-            new WorkerFactory() {
-              @Override
-              public ListenableWorker createWorker(
-                  Context appContext, String workerClassName, WorkerParameters workerParameters) {
-                return new TestDownloadWorker(
-                    appContext,
-                    workerParameters,
-                    MoreExecutors.newDirectExecutorService(),
-                    downloader,
-                    handleFunc);
-              }
-            })
-        .build();
-  }
-
-  /** A test AbstractDownloadWorker impl which handles downloaded file with a given Function. */
-  private static class TestDownloadWorker extends AbstractDownloadWorker {
-    private final Function<File, Void> handleFunc;
-
-    TestDownloadWorker(
-        Context context,
-        WorkerParameters workerParameters,
-        ExecutorService bgExecutorService,
-        ModelDownloader modelDownloader,
-        Function<File, Void> handleFunc) {
-      super(context, workerParameters, bgExecutorService, modelDownloader);
-
-      this.handleFunc = handleFunc;
-    }
-
-    @Override
-    Void handleDownloadedFile(File downloadedFile) {
-      return handleFunc.apply(downloadedFile);
-    }
-  }
-
-  /** A ModelDownloader implementation for testing. Set expected resilts in its constructor. */
-  private static class TestModelDownloader implements ModelDownloader {
-    private final String strWrittenToFile;
-    private final ListenableFuture<Long> futureToReturn;
-
-    public static TestModelDownloader withSuccess(String strWrittenToFile) {
-      return new TestModelDownloader(
-          Futures.immediateFuture((long) strWrittenToFile.getBytes().length), strWrittenToFile);
-    }
-
-    public static TestModelDownloader withFailure(Throwable throwable) {
-      return new TestModelDownloader(Futures.immediateFailedFuture(throwable), null);
-    }
-
-    private TestModelDownloader(ListenableFuture<Long> futureToReturn, String strWrittenToFile) {
-      this.strWrittenToFile = strWrittenToFile;
-      this.futureToReturn = futureToReturn;
-    }
-
-    @Override
-    public ListenableFuture<Long> download(URI uri, File targetFile) {
-      if (strWrittenToFile != null) {
-        try {
-          targetFile.createNewFile();
-          FileWriter fileWriter = new FileWriter(targetFile);
-          fileWriter.write(strWrittenToFile, /* off= */ 0, strWrittenToFile.length());
-          fileWriter.close();
-        } catch (Exception e) {
-          throw new RuntimeException("Failed to prepare test downloadeded file.", e);
-        }
-      }
-      return futureToReturn;
-    }
-  }
-}
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/ActionsSuggestionsHelperTest.java b/java/tests/instrumentation/src/com/android/textclassifier/ActionsSuggestionsHelperTest.java
index 427e89e..ebfeed3 100644
--- a/java/tests/instrumentation/src/com/android/textclassifier/ActionsSuggestionsHelperTest.java
+++ b/java/tests/instrumentation/src/com/android/textclassifier/ActionsSuggestionsHelperTest.java
@@ -226,7 +226,7 @@
   public void createLabeledIntentResult_null() {
     ActionsSuggestionsModel.ActionSuggestion nativeSuggestion =
         new ActionsSuggestionsModel.ActionSuggestion(
-            "text", ConversationAction.TYPE_OPEN_URL, 1.0f, null, null, null);
+            "text", ConversationAction.TYPE_OPEN_URL, 1.0f, null, null, null, null);
 
     LabeledIntent.Result labeledIntentResult =
         ActionsSuggestionsHelper.createLabeledIntentResult(
@@ -246,7 +246,8 @@
             1.0f,
             null,
             null,
-            new RemoteActionTemplate[0]);
+            new RemoteActionTemplate[0],
+            null);
 
     LabeledIntent.Result labeledIntentResult =
         ActionsSuggestionsHelper.createLabeledIntentResult(
@@ -280,7 +281,8 @@
                   null,
                   null,
                   0)
-            });
+            },
+            null);
 
     LabeledIntent.Result labeledIntentResult =
         ActionsSuggestionsHelper.createLabeledIntentResult(
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/DefaultTextClassifierServiceTest.java b/java/tests/instrumentation/src/com/android/textclassifier/DefaultTextClassifierServiceTest.java
index 1c4f7f8..746931b 100644
--- a/java/tests/instrumentation/src/com/android/textclassifier/DefaultTextClassifierServiceTest.java
+++ b/java/tests/instrumentation/src/com/android/textclassifier/DefaultTextClassifierServiceTest.java
@@ -42,6 +42,8 @@
 import com.android.os.AtomsProto.TextClassifierApiUsageReported;
 import com.android.os.AtomsProto.TextClassifierApiUsageReported.ApiType;
 import com.android.os.AtomsProto.TextClassifierApiUsageReported.ResultType;
+import com.android.textclassifier.common.ModelFileManager;
+import com.android.textclassifier.common.TextClassifierSettings;
 import com.android.textclassifier.common.statsd.StatsdTestUtils;
 import com.android.textclassifier.common.statsd.TextClassifierApiUsageLogger;
 import com.google.common.base.Preconditions;
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/DownloaderTestUtils.java b/java/tests/instrumentation/src/com/android/textclassifier/DownloaderTestUtils.java
deleted file mode 100644
index 980dda3..0000000
--- a/java/tests/instrumentation/src/com/android/textclassifier/DownloaderTestUtils.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import android.content.Context;
-import androidx.work.ListenableWorker;
-import androidx.work.WorkInfo;
-import androidx.work.WorkManager;
-import androidx.work.WorkQuery;
-import androidx.work.WorkerParameters;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Iterables;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import java.util.List;
-
-/** Utils for downloader logic testing. */
-final class DownloaderTestUtils {
-
-  /** One unique queue holds at most one request at one time. Returns null if no WorkInfo found. */
-  public static WorkInfo queryTheOnlyWorkInfo(WorkManager workManager, String queueName)
-      throws Exception {
-    WorkQuery workQuery =
-        WorkQuery.Builder.fromUniqueWorkNames(ImmutableList.of(queueName)).build();
-    List<WorkInfo> workInfos = workManager.getWorkInfos(workQuery).get();
-    if (workInfos.isEmpty()) {
-      return null;
-    } else {
-      return Iterables.getOnlyElement(workInfos);
-    }
-  }
-
-  /**
-   * Completes immediately with the pre-set result. If it's not retry, the result will also include
-   * the input Data as its output Data.
-   */
-  public static final class TestWorker extends ListenableWorker {
-    private static Result expectedResult;
-
-    public TestWorker(Context context, WorkerParameters workerParams) {
-      super(context, workerParams);
-    }
-
-    @Override
-    public ListenableFuture<ListenableWorker.Result> startWork() {
-      if (expectedResult == null) {
-        return Futures.immediateFailedFuture(new Exception("no expected result"));
-      }
-      ListenableWorker.Result result;
-      switch (expectedResult) {
-        case SUCCESS:
-          result = ListenableWorker.Result.success(getInputData());
-          break;
-        case FAILURE:
-          result = ListenableWorker.Result.failure(getInputData());
-          break;
-        case RETRY:
-          result = ListenableWorker.Result.retry();
-          break;
-        default:
-          throw new IllegalStateException("illegal result");
-      }
-      // Reset expected result
-      expectedResult = null;
-      return Futures.immediateFuture(result);
-    }
-
-    /** Sets the expected worker result in a static variable. Will be cleaned up after reading. */
-    public static void setExpectedResult(Result expectedResult) {
-      TestWorker.expectedResult = expectedResult;
-    }
-
-    public enum Result {
-      SUCCESS,
-      FAILURE,
-      RETRY;
-    }
-  }
-
-  private DownloaderTestUtils() {}
-}
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/ManifestDownloadWorkerTest.java b/java/tests/instrumentation/src/com/android/textclassifier/ManifestDownloadWorkerTest.java
deleted file mode 100644
index 38fdf47..0000000
--- a/java/tests/instrumentation/src/com/android/textclassifier/ManifestDownloadWorkerTest.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import static com.google.common.truth.Truth.assertThat;
-
-import android.content.Context;
-import androidx.test.core.app.ApplicationProvider;
-import androidx.work.ListenableWorker;
-import androidx.work.WorkInfo;
-import androidx.work.WorkManager;
-import androidx.work.WorkerFactory;
-import androidx.work.WorkerParameters;
-import androidx.work.testing.TestDriver;
-import androidx.work.testing.TestListenableWorkerBuilder;
-import androidx.work.testing.WorkManagerTestInitHelper;
-import com.android.textclassifier.ModelFileManager.ModelType;
-import java.io.File;
-import java.nio.file.Files;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-@RunWith(JUnit4.class)
-public final class ManifestDownloadWorkerTest {
-  private static final String MODEL_URL =
-      "https://www.gstatic.com/android/text_classifier/q/v711/en.fb";
-  private static final long MODEL_SIZE_IN_BYTES = 1L;
-  private static final String MODEL_FINGERPRINT = "hash_fingerprint";
-  private static final String MANIFEST_URL =
-      "https://www.gstatic.com/android/text_classifier/q/v711/en.fb.manifest";
-  private static final String TARGET_MODEL_PATH = "/not_used_fake_path.fb";
-  private static final String MODEL_TYPE = ModelType.ANNOTATOR;
-  private static final String MODEL_LANGUAGE_TAG = "en";
-  private static final String WORK_MANAGER_UNIQUE_WORK_NAME =
-      ModelDownloadManager.getModelUniqueWorkName(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-  private static final int WORKER_MAX_DOWNLOAD_ATTEMPTS = 5;
-  private static final ModelManifest MODEL_MANIFEST_PROTO =
-      ModelManifest.newBuilder()
-          .addModels(
-              ModelManifest.Model.newBuilder()
-                  .setUrl(MODEL_URL)
-                  .setSizeInBytes(MODEL_SIZE_IN_BYTES)
-                  .setFingerprint(MODEL_FINGERPRINT)
-                  .build())
-          .build();
-
-  private File manifestFile;
-  private WorkManager workManager;
-  private TestDriver workManagerTestDriver;
-
-  @Before
-  public void setUp() {
-    Context context = ApplicationProvider.getApplicationContext();
-    WorkManagerTestInitHelper.initializeTestWorkManager(context);
-
-    this.manifestFile = new File(context.getCacheDir(), "model.fb.manifest");
-    this.workManager = WorkManager.getInstance(context);
-    this.workManagerTestDriver = WorkManagerTestInitHelper.getTestDriver(context);
-
-    manifestFile.deleteOnExit();
-  }
-
-  @Test
-  public void enqueueSuccessfullyAndCheckData() throws Exception {
-    ManifestDownloadWorker worker = createWorker(MANIFEST_URL, manifestFile.getAbsolutePath());
-
-    // We only want to test the downloaded file handling code, so reuse existing manifest file
-    manifestFile.createNewFile();
-    Files.write(manifestFile.toPath(), MODEL_MANIFEST_PROTO.toByteArray());
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.success());
-    assertThat(manifestFile.exists()).isTrue();
-
-    WorkInfo workInfo =
-        DownloaderTestUtils.queryTheOnlyWorkInfo(workManager, WORK_MANAGER_UNIQUE_WORK_NAME);
-    assertThat(workInfo).isNotNull();
-    assertThat(workInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-    assertThat(workInfo.getTags()).contains(MANIFEST_URL);
-
-    // Check input Data with TestWorker
-    DownloaderTestUtils.TestWorker.setExpectedResult(DownloaderTestUtils.TestWorker.Result.SUCCESS);
-    workManagerTestDriver.setAllConstraintsMet(workInfo.getId());
-
-    WorkInfo newWorkInfo =
-        DownloaderTestUtils.queryTheOnlyWorkInfo(workManager, WORK_MANAGER_UNIQUE_WORK_NAME);
-    assertThat(newWorkInfo.getId()).isEqualTo(workInfo.getId());
-    assertThat(newWorkInfo.getState()).isEqualTo(WorkInfo.State.SUCCEEDED);
-    assertThat(newWorkInfo.getOutputData().getString(AbstractDownloadWorker.DATA_URL_KEY))
-        .isEqualTo(MODEL_URL);
-    assertThat(
-            newWorkInfo
-                .getOutputData()
-                .getLong(ModelDownloadWorker.DATA_MODEL_SIZE_IN_BYTES_KEY, /* defaultValue= */ -1))
-        .isEqualTo(MODEL_SIZE_IN_BYTES);
-    assertThat(
-            newWorkInfo.getOutputData().getString(ModelDownloadWorker.DATA_MODEL_FINGERPRINT_KEY))
-        .isEqualTo(MODEL_FINGERPRINT);
-    assertThat(
-            newWorkInfo.getOutputData().getString(ModelDownloadWorker.DATA_TARGET_MODEL_PATH_KEY))
-        .isEqualTo(TARGET_MODEL_PATH);
-  }
-
-  @Test
-  public void invalidManifestFile_invalidFileDeletedAndRetry() throws Exception {
-    ManifestDownloadWorker worker = createWorker(MANIFEST_URL, manifestFile.getAbsolutePath());
-
-    manifestFile.createNewFile();
-    Files.write(manifestFile.toPath(), "random_content".getBytes());
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.retry());
-    assertThat(manifestFile.exists()).isFalse();
-  }
-
-  private static ManifestDownloadWorker createWorker(String manifestUrl, String manifestPath) {
-    return TestListenableWorkerBuilder.from(
-            ApplicationProvider.getApplicationContext(), ManifestDownloadWorker.class)
-        .setInputData(
-            ManifestDownloadWorker.createInputData(
-                MODEL_TYPE,
-                MODEL_LANGUAGE_TAG,
-                manifestUrl,
-                manifestPath,
-                TARGET_MODEL_PATH,
-                WORKER_MAX_DOWNLOAD_ATTEMPTS,
-                /* reuseExistingManifestFile= */ true))
-        .setRunAttemptCount(0)
-        .setWorkerFactory(
-            new WorkerFactory() {
-              @Override
-              public ListenableWorker createWorker(
-                  Context appContext, String workerClassName, WorkerParameters workerParameters) {
-                return new ManifestDownloadWorker(
-                    appContext, workerParameters, DownloaderTestUtils.TestWorker.class);
-              }
-            })
-        .build();
-  }
-}
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloadManagerTest.java b/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloadManagerTest.java
deleted file mode 100644
index 8564130..0000000
--- a/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloadManagerTest.java
+++ /dev/null
@@ -1,412 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import static com.google.common.truth.Truth.assertThat;
-
-import android.content.Context;
-import android.os.LocaleList;
-import android.provider.DeviceConfig.Properties;
-import androidx.annotation.NonNull;
-import androidx.test.core.app.ApplicationProvider;
-import androidx.test.ext.junit.runners.AndroidJUnit4;
-import androidx.work.ExistingWorkPolicy;
-import androidx.work.OneTimeWorkRequest;
-import androidx.work.WorkInfo;
-import androidx.work.WorkManager;
-import androidx.work.testing.TestDriver;
-import androidx.work.testing.WorkManagerTestInitHelper;
-import com.android.textclassifier.ModelFileManager.ModelType;
-import com.android.textclassifier.testing.SetDefaultLocalesRule;
-import com.google.common.collect.ImmutableList;
-import com.google.common.util.concurrent.MoreExecutors;
-import java.io.File;
-import java.util.HashMap;
-import java.util.Locale;
-import javax.annotation.Nullable;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.MockitoAnnotations;
-
-@RunWith(AndroidJUnit4.class)
-public final class ModelDownloadManagerTest {
-  private static final String URL_PREFIX = ModelDownloadManager.TEXT_CLASSIFIER_URL_PREFIX;
-  private static final String URL_SUFFIX = "abc.xyz";
-  private static final String URL_SUFFIX_2 = "def.xyz";
-  private static final String URL = URL_PREFIX + URL_SUFFIX;
-  private static final String URL_2 = URL_PREFIX + URL_SUFFIX_2;
-  // Parameterized test is not yet supported for instrumentation test
-  @ModelType.ModelTypeDef private static final String MODEL_TYPE = ModelType.ANNOTATOR;
-  @ModelType.ModelTypeDef private static final String MODEL_TYPE_2 = ModelType.ACTIONS_SUGGESTIONS;
-  private static final String MODEL_LANGUAGE_TAG = "en";
-  private static final String MODEL_LANGUAGE_TAG_2 = "zh";
-  private static final String MODEL_LANGUAGE_UNIVERSAL_TAG =
-      ModelDownloadManager.UNIVERSAL_MODEL_LANGUAGE_TAG;
-  private static final LocaleList DEFAULT_LOCALE_LIST =
-      new LocaleList(new Locale(MODEL_LANGUAGE_TAG));
-
-  @Rule public final SetDefaultLocalesRule setDefaultLocalesRule = new SetDefaultLocalesRule();
-
-  // TODO(licha): Maybe we can just use the real TextClassifierSettings
-  private FakeDeviceConfig fakeDeviceConfig;
-  private WorkManager workManager;
-  private TestDriver workManagerTestDriver;
-  private File downloadTargetFile;
-  private ModelDownloadManager downloadManager;
-
-  @Before
-  public void setUp() {
-    MockitoAnnotations.initMocks(this);
-    Context context = ApplicationProvider.getApplicationContext();
-    WorkManagerTestInitHelper.initializeTestWorkManager(context);
-
-    this.fakeDeviceConfig = new FakeDeviceConfig();
-    this.workManager = WorkManager.getInstance(context);
-    this.workManagerTestDriver = WorkManagerTestInitHelper.getTestDriver(context);
-    ModelFileManager modelFileManager = new ModelFileManager(context, ImmutableList.of());
-    this.downloadTargetFile = modelFileManager.getDownloadTargetFile(MODEL_TYPE, URL_SUFFIX);
-    this.downloadManager =
-        new ModelDownloadManager(
-            workManager,
-            DownloaderTestUtils.TestWorker.class,
-            modelFileManager,
-            new TextClassifierSettings(fakeDeviceConfig),
-            MoreExecutors.newDirectExecutorService());
-    setDefaultLocalesRule.set(DEFAULT_LOCALE_LIST);
-  }
-
-  @After
-  public void tearDown() {
-    recursiveDelete(ApplicationProvider.getApplicationContext().getFilesDir());
-  }
-
-  @Test
-  public void init_checkConfigWhenInit() throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    downloadManager.init();
-
-    WorkInfo workInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(workInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_flagNotSet() throws Exception {
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-
-    WorkInfo workInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(workInfo).isNull();
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_fileAlreadyExists() throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    try {
-      downloadTargetFile.getParentFile().mkdirs();
-      downloadTargetFile.createNewFile();
-      downloadManager.checkConfigAndScheduleDownloadsForTesting();
-
-      WorkInfo workInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-      assertThat(workInfo).isNull();
-    } finally {
-      downloadTargetFile.delete();
-    }
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_doNotRedownloadTheSameModel() throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    // Simulates a previous model download task
-    OneTimeWorkRequest modelDownloadRequest =
-        new OneTimeWorkRequest.Builder(DownloaderTestUtils.TestWorker.class).addTag(URL).build();
-    DownloaderTestUtils.TestWorker.setExpectedResult(DownloaderTestUtils.TestWorker.Result.SUCCESS);
-    workManager
-        .enqueueUniqueWork(
-            ModelDownloadManager.getModelUniqueWorkName(MODEL_TYPE, MODEL_LANGUAGE_TAG),
-            ExistingWorkPolicy.REPLACE,
-            modelDownloadRequest)
-        .getResult()
-        .get();
-
-    // Assert the model download work succeeded
-    WorkInfo succeededModelWorkInfo =
-        DownloaderTestUtils.queryTheOnlyWorkInfo(
-            workManager,
-            ModelDownloadManager.getModelUniqueWorkName(MODEL_TYPE, MODEL_LANGUAGE_TAG));
-    assertThat(succeededModelWorkInfo.getState()).isEqualTo(WorkInfo.State.SUCCEEDED);
-
-    // Trigger the config check
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-    WorkInfo workInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(workInfo).isNull();
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_requestEnqueuedSuccessfully() throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-
-    WorkInfo workInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(workInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_multipleModelsEnqueued() throws Exception {
-    for (@ModelType.ModelTypeDef String modelType : ModelType.values()) {
-      setUpModelUrlSuffix(modelType, MODEL_LANGUAGE_TAG, modelType + URL_SUFFIX);
-    }
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-
-    for (@ModelType.ModelTypeDef String modelType : ModelType.values()) {
-      WorkInfo workInfo = queryTheOnlyWorkInfo(modelType, MODEL_LANGUAGE_TAG);
-      assertThat(workInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-    }
-  }
-
-  // This test is to make sure we will not schedule a new task if another task exists with the same
-  // url tag, even if it's in a different queue. Currently we schedule both manifest and model
-  // download tasks with the same model url tag. This behavior protects us from unintended task
-  // overriding.
-  @Test
-  public void checkConfigAndScheduleDownloads_urlIsCheckedGlobally() throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-
-    WorkInfo workInfo1 = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(workInfo1.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-
-    // Set the same url to a different model type flag
-    setUpModelUrlSuffix(MODEL_TYPE_2, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-
-    workInfo1 = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(workInfo1.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-    WorkInfo workInfo2 = queryTheOnlyWorkInfo(MODEL_TYPE_2, MODEL_LANGUAGE_TAG);
-    assertThat(workInfo2).isNull();
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_checkMultipleTimes() throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-    WorkInfo oldWorkInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-    WorkInfo newWorkInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-
-    // Will not schedule multiple times, still the same WorkInfo
-    assertThat(oldWorkInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-    assertThat(newWorkInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-    assertThat(oldWorkInfo.getId()).isEqualTo(newWorkInfo.getId());
-    assertThat(oldWorkInfo.getTags()).containsExactlyElementsIn(newWorkInfo.getTags());
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_flagUpdatedWhilePrevDownloadPending()
-      throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-    WorkInfo oldWorkInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX_2);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-    WorkInfo newWorkInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-
-    // oldWorkInfo will be replaced with the newWorkInfo
-    assertThat(oldWorkInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-    assertThat(newWorkInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-    assertThat(oldWorkInfo.getId()).isNotEqualTo(newWorkInfo.getId());
-    assertThat(oldWorkInfo.getTags()).contains(URL);
-    assertThat(newWorkInfo.getTags()).contains(URL_2);
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_flagUpdatedAfterPrevDownloadDone() throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-    WorkInfo oldWorkInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    // Run scheduled download
-    DownloaderTestUtils.TestWorker.setExpectedResult(DownloaderTestUtils.TestWorker.Result.SUCCESS);
-    workManagerTestDriver.setAllConstraintsMet(oldWorkInfo.getId());
-    try {
-      // Create download file
-      downloadTargetFile.createNewFile();
-      downloadManager.checkConfigAndScheduleDownloadsForTesting();
-      // Update device config
-      setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX_2);
-      downloadManager.checkConfigAndScheduleDownloadsForTesting();
-      WorkInfo newWorkInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-
-      // Assert new request can be queued successfully
-      assertThat(newWorkInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-      assertThat(newWorkInfo.getTags()).contains(URL_2);
-      assertThat(oldWorkInfo.getId()).isNotEqualTo(newWorkInfo.getId());
-    } finally {
-      downloadTargetFile.delete();
-    }
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_workerSucceeded() throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-
-    WorkInfo workInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(workInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-
-    DownloaderTestUtils.TestWorker.setExpectedResult(DownloaderTestUtils.TestWorker.Result.SUCCESS);
-    workManagerTestDriver.setAllConstraintsMet(workInfo.getId());
-
-    WorkInfo newWorkInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(newWorkInfo.getId()).isEqualTo(workInfo.getId());
-    assertThat(newWorkInfo.getState()).isEqualTo(WorkInfo.State.SUCCEEDED);
-    assertThat(newWorkInfo.getOutputData().getString(AbstractDownloadWorker.DATA_URL_KEY))
-        .isEqualTo(URL);
-    assertThat(
-            newWorkInfo.getOutputData().getString(AbstractDownloadWorker.DATA_DESTINATION_PATH_KEY))
-        .isEqualTo(
-            ModelDownloadManager.getTargetManifestPath(downloadTargetFile.getAbsolutePath()));
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_workerFailed() throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-
-    WorkInfo workInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(workInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-
-    DownloaderTestUtils.TestWorker.setExpectedResult(DownloaderTestUtils.TestWorker.Result.FAILURE);
-    workManagerTestDriver.setAllConstraintsMet(workInfo.getId());
-
-    WorkInfo newWorkInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(newWorkInfo.getId()).isEqualTo(workInfo.getId());
-    assertThat(newWorkInfo.getState()).isEqualTo(WorkInfo.State.FAILED);
-    assertThat(newWorkInfo.getOutputData().getString(AbstractDownloadWorker.DATA_URL_KEY))
-        .isEqualTo(URL);
-    assertThat(
-            newWorkInfo.getOutputData().getString(AbstractDownloadWorker.DATA_DESTINATION_PATH_KEY))
-        .isEqualTo(
-            ModelDownloadManager.getTargetManifestPath(downloadTargetFile.getAbsolutePath()));
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_workerRetried() throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG, URL_SUFFIX);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-
-    WorkInfo workInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(workInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-
-    DownloaderTestUtils.TestWorker.setExpectedResult(DownloaderTestUtils.TestWorker.Result.RETRY);
-    workManagerTestDriver.setAllConstraintsMet(workInfo.getId());
-
-    WorkInfo newWorkInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG);
-    assertThat(newWorkInfo.getId()).isEqualTo(workInfo.getId());
-    assertThat(newWorkInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-    assertThat(newWorkInfo.getRunAttemptCount()).isEqualTo(1);
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_chooseTheBestLocaleTag() throws Exception {
-    // System default locale: zh-hant-hk
-    setDefaultLocalesRule.set(new LocaleList(Locale.forLanguageTag("zh-hant-hk")));
-
-    // All configured locale tags
-    setUpModelUrlSuffix(MODEL_TYPE, "zh-hant", URL_SUFFIX); // best match
-    setUpModelUrlSuffix(MODEL_TYPE, "zh", URL_SUFFIX_2); // too general
-    setUpModelUrlSuffix(MODEL_TYPE, "zh-hk", URL_SUFFIX_2); // missing script
-    setUpModelUrlSuffix(MODEL_TYPE, "zh-hans-hk", URL_SUFFIX_2); // incorrect script
-    setUpModelUrlSuffix(MODEL_TYPE, "es-hant-hk", URL_SUFFIX_2); // incorrect language
-
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-
-    // The downloader choose: zh-hant
-    assertThat(queryTheOnlyWorkInfo(MODEL_TYPE, "zh-hant").getState())
-        .isEqualTo(WorkInfo.State.ENQUEUED);
-
-    assertThat(queryTheOnlyWorkInfo(MODEL_TYPE, "zh")).isNull();
-    assertThat(queryTheOnlyWorkInfo(MODEL_TYPE, "zh-hk")).isNull();
-    assertThat(queryTheOnlyWorkInfo(MODEL_TYPE, "zh-hans-hk")).isNull();
-    assertThat(queryTheOnlyWorkInfo(MODEL_TYPE, "es-hant-hk")).isNull();
-  }
-
-  @Test
-  public void checkConfigAndScheduleDownloads_useUniversalModelIfNoMatchedTag() throws Exception {
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_TAG_2, URL_SUFFIX);
-    setUpModelUrlSuffix(MODEL_TYPE, MODEL_LANGUAGE_UNIVERSAL_TAG, URL_SUFFIX_2);
-    downloadManager.checkConfigAndScheduleDownloadsForTesting();
-
-    assertThat(queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_TAG_2)).isNull();
-
-    WorkInfo workInfo = queryTheOnlyWorkInfo(MODEL_TYPE, MODEL_LANGUAGE_UNIVERSAL_TAG);
-    assertThat(workInfo.getState()).isEqualTo(WorkInfo.State.ENQUEUED);
-    assertThat(workInfo.getTags()).contains(URL_2);
-  }
-
-  private void setUpModelUrlSuffix(
-      @ModelType.ModelTypeDef String modelType, String modelLanguageTag, String urlSuffix) {
-    String deviceConfigFlag =
-        String.format(
-            TextClassifierSettings.MANIFEST_URL_SUFFIX_TEMPLATE, modelType, modelLanguageTag);
-    fakeDeviceConfig.setConfig(deviceConfigFlag, urlSuffix);
-  }
-
-  /** One unique queue holds at most one request at one time. Returns null if no WorkInfo found. */
-  private WorkInfo queryTheOnlyWorkInfo(
-      @ModelType.ModelTypeDef String modelType, String modelLanguageTag) throws Exception {
-    return DownloaderTestUtils.queryTheOnlyWorkInfo(
-        workManager, ModelDownloadManager.getManifestUniqueWorkName(modelType, modelLanguageTag));
-  }
-
-  private static void recursiveDelete(File f) {
-    if (f.isDirectory()) {
-      for (File innerFile : f.listFiles()) {
-        recursiveDelete(innerFile);
-      }
-    }
-    f.delete();
-  }
-
-  private static class FakeDeviceConfig implements TextClassifierSettings.IDeviceConfig {
-
-    private final HashMap<String, String> configs;
-
-    public FakeDeviceConfig() {
-      this.configs = new HashMap<>();
-    }
-
-    public void setConfig(String key, String value) {
-      configs.put(key, value);
-    }
-
-    @Override
-    public Properties getProperties(@NonNull String namespace, @NonNull String... names) {
-      Properties.Builder builder = new Properties.Builder(namespace);
-      for (String key : configs.keySet()) {
-        builder.setString(key, configs.get(key));
-      }
-      return builder.build();
-    }
-
-    @Override
-    public String getString(
-        @NonNull String namespace, @NonNull String name, @Nullable String defaultValue) {
-      return configs.containsKey(name) ? configs.get(name) : defaultValue;
-    }
-  }
-}
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloadWorkerTest.java b/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloadWorkerTest.java
deleted file mode 100644
index 107aa95..0000000
--- a/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloadWorkerTest.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import static com.google.common.truth.Truth.assertThat;
-import static org.testng.Assert.expectThrows;
-
-import android.content.Context;
-import androidx.test.core.app.ApplicationProvider;
-import androidx.work.ListenableWorker;
-import androidx.work.WorkerFactory;
-import androidx.work.WorkerParameters;
-import androidx.work.testing.TestListenableWorkerBuilder;
-import androidx.work.testing.WorkManagerTestInitHelper;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-@RunWith(JUnit4.class)
-public final class ModelDownloadWorkerTest {
-  private static final String MODEL_URL =
-      "http://www.gstatic.com/android/text_classifier/q/v711/en.fb";
-  private static final String MODEL_CONTENT = "content";
-  private static final String MODEL_CONTENT_CORRUPTED = "CONTENT";
-  private static final long MODEL_SIZE_IN_BYTES = 7L;
-  private static final String MODEL_FINGERPRINT =
-      "5406ebea1618e9b73a7290c5d716f0b47b4f1fbc5d8c"
-          + "5e78c9010a3e01c18d8594aa942e3536f7e01574245d34647523";
-  private static final int WORKER_MAX_DOWNLOAD_ATTEMPTS = 5;
-
-  private File manifestFile;
-  private File pendingModelFile;
-  private File targetModelFile;
-
-  @Before
-  public void setUp() {
-    Context context = ApplicationProvider.getApplicationContext();
-    WorkManagerTestInitHelper.initializeTestWorkManager(context);
-
-    this.manifestFile = new File(context.getCacheDir(), "model.fb.manifest");
-    this.pendingModelFile = new File(context.getCacheDir(), "model.fb.pending");
-    this.targetModelFile = new File(context.getCacheDir(), "model.fb");
-  }
-
-  @After
-  public void tearDown() {
-    manifestFile.delete();
-    pendingModelFile.delete();
-    targetModelFile.delete();
-  }
-
-  @Test
-  public void passedVerificationAndMoved() throws Exception {
-    ModelDownloadWorker worker = createWorker(manifestFile, pendingModelFile, targetModelFile);
-    manifestFile.createNewFile();
-    writeToFile(pendingModelFile, MODEL_CONTENT);
-
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.success());
-    assertThat(targetModelFile.exists()).isTrue();
-    assertThat(pendingModelFile.exists()).isFalse();
-    assertThat(manifestFile.exists()).isFalse();
-  }
-
-  @Test
-  public void passedVerificationAndReplaced() throws Exception {
-    ModelDownloadWorker worker = createWorker(manifestFile, pendingModelFile, targetModelFile);
-    manifestFile.createNewFile();
-    writeToFile(pendingModelFile, MODEL_CONTENT);
-    writeToFile(targetModelFile, MODEL_CONTENT);
-
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.success());
-    assertThat(targetModelFile.exists()).isTrue();
-    assertThat(pendingModelFile.exists()).isFalse();
-    assertThat(manifestFile.exists()).isFalse();
-  }
-
-  @Test
-  public void failedVerificationAndRetry() throws Exception {
-    ModelDownloadWorker worker = createWorker(manifestFile, pendingModelFile, targetModelFile);
-    manifestFile.createNewFile();
-    writeToFile(pendingModelFile, /* content= */ "");
-
-    assertThat(worker.startWork().get()).isEqualTo(ListenableWorker.Result.retry());
-    assertThat(targetModelFile.exists()).isFalse();
-    assertThat(pendingModelFile.exists()).isFalse();
-    assertThat(manifestFile.exists()).isTrue();
-  }
-
-  @Test
-  public void validateModel_validationPassed() throws Exception {
-    ModelDownloadWorker worker = createWorker(manifestFile, pendingModelFile, targetModelFile);
-    writeToFile(pendingModelFile, MODEL_CONTENT);
-    worker.handleDownloadedFile(pendingModelFile);
-  }
-
-  @Test
-  public void validateModel_fileDoesNotExist() throws Exception {
-    ModelDownloadWorker worker = createWorker(manifestFile, pendingModelFile, targetModelFile);
-    pendingModelFile.delete();
-    IllegalStateException e =
-        expectThrows(
-            IllegalStateException.class, () -> worker.handleDownloadedFile(pendingModelFile));
-    assertThat(e).hasCauseThat().hasMessageThat().contains("does not exist");
-  }
-
-  @Test
-  public void validateModel_emptyFile() throws Exception {
-    ModelDownloadWorker worker = createWorker(manifestFile, pendingModelFile, targetModelFile);
-    writeToFile(pendingModelFile, /* content= */ "");
-    IllegalStateException e =
-        expectThrows(
-            IllegalStateException.class, () -> worker.handleDownloadedFile(pendingModelFile));
-    assertThat(e).hasCauseThat().hasMessageThat().contains("size does not match");
-  }
-
-  @Test
-  public void validateModel_corruptedContent() throws Exception {
-    ModelDownloadWorker worker = createWorker(manifestFile, pendingModelFile, targetModelFile);
-    writeToFile(pendingModelFile, MODEL_CONTENT_CORRUPTED);
-    IllegalStateException e =
-        expectThrows(
-            IllegalStateException.class, () -> worker.handleDownloadedFile(pendingModelFile));
-    assertThat(e).hasCauseThat().hasMessageThat().contains("fingerprint does not match");
-  }
-
-  private static ModelDownloadWorker createWorker(
-      File manifestFile, File pendingModelFile, File targetModelFile) {
-    return TestListenableWorkerBuilder.from(
-            ApplicationProvider.getApplicationContext(), ModelDownloadWorker.class)
-        .setInputData(
-            ModelDownloadWorker.createInputData(
-                MODEL_URL,
-                MODEL_SIZE_IN_BYTES,
-                MODEL_FINGERPRINT,
-                manifestFile.getAbsolutePath(),
-                pendingModelFile.getAbsolutePath(),
-                targetModelFile.getAbsolutePath(),
-                WORKER_MAX_DOWNLOAD_ATTEMPTS,
-                /* reuseExistingModelFile= */ true))
-        .setRunAttemptCount(0)
-        .setWorkerFactory(
-            new WorkerFactory() {
-              @Override
-              public ListenableWorker createWorker(
-                  Context appContext, String workerClassName, WorkerParameters workerParameters) {
-                return new ModelDownloadWorker(appContext, workerParameters);
-              }
-            })
-        .build();
-  }
-
-  private static void writeToFile(File file, String content) throws IOException {
-    file.createNewFile();
-    FileWriter fileWriter = new FileWriter(file);
-    fileWriter.write(content, /* off= */ 0, content.length());
-    fileWriter.close();
-  }
-}
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloaderImplTest.java b/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloaderImplTest.java
deleted file mode 100644
index 806172d..0000000
--- a/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloaderImplTest.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import static com.google.common.truth.Truth.assertThat;
-import static java.util.concurrent.TimeUnit.SECONDS;
-import static org.testng.Assert.expectThrows;
-
-import android.content.Context;
-import androidx.test.core.app.ApplicationProvider;
-import com.android.textclassifier.TestModelDownloaderService.DownloadResult;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.MoreExecutors;
-import java.io.File;
-import java.net.URI;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-@RunWith(JUnit4.class)
-public final class ModelDownloaderImplTest {
-  private static final URI TEST_URI = URI.create("test_uri");
-
-  private ModelDownloaderImpl modelDownloaderImpl;
-  private File targetFile;
-
-  @Before
-  public void setUp() {
-    Context context = ApplicationProvider.getApplicationContext();
-    this.modelDownloaderImpl =
-        new ModelDownloaderImpl(
-            context, MoreExecutors.newDirectExecutorService(), TestModelDownloaderService.class);
-    this.targetFile = new File(context.getCacheDir(), "targetFile.fb");
-  }
-
-  @After
-  public void tearDown() {
-    TestModelDownloaderService.reset();
-  }
-
-  @Test
-  public void download_failToBind() throws Exception {
-    assertThat(TestModelDownloaderService.hasEverBeenBound()).isFalse();
-    assertThat(TestModelDownloaderService.isBound()).isFalse();
-
-    TestModelDownloaderService.setBindSucceed(false);
-    ListenableFuture<Long> bytesWrittenFuture = modelDownloaderImpl.download(TEST_URI, targetFile);
-
-    expectThrows(Throwable.class, bytesWrittenFuture::get);
-    assertThat(TestModelDownloaderService.isBound()).isFalse();
-    assertThat(TestModelDownloaderService.hasEverBeenBound()).isFalse();
-  }
-
-  @Test
-  public void download_succeed() throws Exception {
-    assertThat(TestModelDownloaderService.hasEverBeenBound()).isFalse();
-    assertThat(TestModelDownloaderService.isBound()).isFalse();
-
-    TestModelDownloaderService.setBindSucceed(true);
-    TestModelDownloaderService.setDownloadResult(DownloadResult.SUCCEEDED);
-    ListenableFuture<Long> bytesWrittenFuture = modelDownloaderImpl.download(TEST_URI, targetFile);
-
-    assertThat(bytesWrittenFuture.get()).isEqualTo(TestModelDownloaderService.BYTES_WRITTEN);
-    assertThat(TestModelDownloaderService.getOnUnbindInvokedLatch().await(1L, SECONDS)).isTrue();
-    assertThat(TestModelDownloaderService.isBound()).isFalse();
-    assertThat(TestModelDownloaderService.hasEverBeenBound()).isTrue();
-  }
-
-  @Test
-  public void download_fail() throws Exception {
-    assertThat(TestModelDownloaderService.hasEverBeenBound()).isFalse();
-    assertThat(TestModelDownloaderService.isBound()).isFalse();
-
-    TestModelDownloaderService.setBindSucceed(true);
-    TestModelDownloaderService.setDownloadResult(DownloadResult.FAILED);
-    ListenableFuture<Long> bytesWrittenFuture = modelDownloaderImpl.download(TEST_URI, targetFile);
-
-    Throwable t = expectThrows(Throwable.class, bytesWrittenFuture::get);
-    assertThat(t).hasMessageThat().contains(TestModelDownloaderService.ERROR_MSG);
-    assertThat(TestModelDownloaderService.getOnUnbindInvokedLatch().await(1L, SECONDS)).isTrue();
-    assertThat(TestModelDownloaderService.isBound()).isFalse();
-    assertThat(TestModelDownloaderService.hasEverBeenBound()).isTrue();
-  }
-
-  @Test
-  public void download_cancelAndUnbind() throws Exception {
-    assertThat(TestModelDownloaderService.hasEverBeenBound()).isFalse();
-    assertThat(TestModelDownloaderService.isBound()).isFalse();
-
-    TestModelDownloaderService.setBindSucceed(true);
-    TestModelDownloaderService.setDownloadResult(DownloadResult.RUNNING_FOREVER);
-    ListenableFuture<Long> bytesWrittenFuture = modelDownloaderImpl.download(TEST_URI, targetFile);
-    bytesWrittenFuture.cancel(true);
-
-    expectThrows(Throwable.class, bytesWrittenFuture::get);
-    assertThat(TestModelDownloaderService.getOnUnbindInvokedLatch().await(1L, SECONDS)).isTrue();
-    assertThat(TestModelDownloaderService.isBound()).isFalse();
-    assertThat(TestModelDownloaderService.hasEverBeenBound()).isTrue();
-  }
-}
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloaderServiceImplTest.java b/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloaderServiceImplTest.java
deleted file mode 100644
index d50dc78..0000000
--- a/java/tests/instrumentation/src/com/android/textclassifier/ModelDownloaderServiceImplTest.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import static com.google.common.truth.Truth.assertThat;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.when;
-import static org.testng.Assert.expectThrows;
-
-import androidx.test.core.app.ApplicationProvider;
-import com.google.android.downloader.DownloadConstraints;
-import com.google.android.downloader.DownloadRequest;
-import com.google.android.downloader.DownloadResult;
-import com.google.android.downloader.Downloader;
-import com.google.android.downloader.SimpleFileDownloadDestination;
-import com.google.common.util.concurrent.FluentFuture;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.google.common.util.concurrent.SettableFuture;
-import java.io.File;
-import java.net.URI;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-
-@RunWith(JUnit4.class)
-public final class ModelDownloaderServiceImplTest {
-  private static final long BYTES_WRITTEN = 1L;
-  private static final String DOWNLOAD_URI =
-      "https://www.gstatic.com/android/text_classifier/r/v999/en.fb";
-
-  @Mock private Downloader downloader;
-  private File targetModelFile;
-  private File targetMetadataFile;
-  private ModelDownloaderServiceImpl modelDownloaderServiceImpl;
-  private TestSuccessCallbackImpl successCallback;
-  private TestFailureCallbackImpl failureCallback;
-
-  @Before
-  public void setUp() {
-    MockitoAnnotations.initMocks(this);
-
-    this.targetModelFile =
-        new File(ApplicationProvider.getApplicationContext().getCacheDir(), "model.fb");
-    this.targetMetadataFile = ModelDownloaderServiceImpl.getMetadataFile(targetModelFile);
-    this.modelDownloaderServiceImpl =
-        new ModelDownloaderServiceImpl(MoreExecutors.newDirectExecutorService(), downloader);
-    this.successCallback = new TestSuccessCallbackImpl();
-    this.failureCallback = new TestFailureCallbackImpl();
-
-    targetModelFile.deleteOnExit();
-    targetMetadataFile.deleteOnExit();
-    when(downloader.newRequestBuilder(any(), any()))
-        .thenReturn(
-            DownloadRequest.newBuilder()
-                .setUri(URI.create(DOWNLOAD_URI))
-                .setDownloadConstraints(DownloadConstraints.NONE)
-                .setDestination(
-                    new SimpleFileDownloadDestination(targetModelFile, targetMetadataFile)));
-  }
-
-  @Test
-  public void download_succeeded() throws Exception {
-    targetModelFile.createNewFile();
-    targetMetadataFile.createNewFile();
-    when(downloader.execute(any()))
-        .thenReturn(
-            FluentFuture.from(Futures.immediateFuture(DownloadResult.create(BYTES_WRITTEN))));
-    modelDownloaderServiceImpl.download(
-        DOWNLOAD_URI, targetModelFile.getAbsolutePath(), successCallback);
-
-    assertThat(successCallback.getBytesWrittenFuture().get()).isEqualTo(BYTES_WRITTEN);
-    assertThat(targetModelFile.exists()).isTrue();
-    assertThat(targetMetadataFile.exists()).isFalse();
-  }
-
-  @Test
-  public void download_failed() throws Exception {
-    targetModelFile.createNewFile();
-    targetMetadataFile.createNewFile();
-    when(downloader.execute(any()))
-        .thenReturn(FluentFuture.from(Futures.immediateFailedFuture(new Exception("err_msg"))));
-    modelDownloaderServiceImpl.download(
-        DOWNLOAD_URI, targetModelFile.getAbsolutePath(), successCallback);
-
-    Throwable t =
-        expectThrows(Throwable.class, () -> successCallback.getBytesWrittenFuture().get());
-    assertThat(t).hasMessageThat().contains("err_msg");
-    assertThat(targetModelFile.exists()).isFalse();
-    assertThat(targetMetadataFile.exists()).isFalse();
-  }
-
-  @Test
-  public void download_succeeded_callbackFailed() throws Exception {
-    targetModelFile.createNewFile();
-    targetMetadataFile.createNewFile();
-    when(downloader.execute(any()))
-        .thenReturn(
-            FluentFuture.from(Futures.immediateFuture(DownloadResult.create(BYTES_WRITTEN))));
-    modelDownloaderServiceImpl.download(
-        DOWNLOAD_URI, targetModelFile.getAbsolutePath(), failureCallback);
-
-    assertThat(failureCallback.onSuccessCalled).isTrue();
-    assertThat(targetModelFile.exists()).isTrue();
-    assertThat(targetMetadataFile.exists()).isFalse();
-  }
-
-  @Test
-  public void download_failed_callbackFailed() throws Exception {
-    targetModelFile.createNewFile();
-    targetMetadataFile.createNewFile();
-    when(downloader.execute(any()))
-        .thenReturn(FluentFuture.from(Futures.immediateFailedFuture(new Exception("err_msg"))));
-    modelDownloaderServiceImpl.download(
-        DOWNLOAD_URI, targetModelFile.getAbsolutePath(), failureCallback);
-
-    assertThat(failureCallback.onFailureCalled).isTrue();
-    assertThat(targetModelFile.exists()).isFalse();
-    assertThat(targetMetadataFile.exists()).isFalse();
-  }
-
-  // NOTICE: Had some problem mocking this AIDL interface, so created fake impls
-  private static final class TestSuccessCallbackImpl extends IModelDownloaderCallback.Stub {
-    private final SettableFuture<Long> bytesWrittenFuture = SettableFuture.<Long>create();
-
-    public ListenableFuture<Long> getBytesWrittenFuture() {
-      return bytesWrittenFuture;
-    }
-
-    @Override
-    public void onSuccess(long bytesWritten) {
-      bytesWrittenFuture.set(bytesWritten);
-    }
-
-    @Override
-    public void onFailure(String error) {
-      bytesWrittenFuture.setException(new RuntimeException(error));
-    }
-  }
-
-  private static final class TestFailureCallbackImpl extends IModelDownloaderCallback.Stub {
-    public boolean onSuccessCalled = false;
-    public boolean onFailureCalled = false;
-
-    @Override
-    public void onSuccess(long bytesWritten) {
-      onSuccessCalled = true;
-      throw new RuntimeException();
-    }
-
-    @Override
-    public void onFailure(String error) {
-      onFailureCalled = true;
-      throw new RuntimeException();
-    }
-  }
-}
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/TestDataUtils.java b/java/tests/instrumentation/src/com/android/textclassifier/TestDataUtils.java
index 7565a0b..5c1d95e 100644
--- a/java/tests/instrumentation/src/com/android/textclassifier/TestDataUtils.java
+++ b/java/tests/instrumentation/src/com/android/textclassifier/TestDataUtils.java
@@ -17,8 +17,9 @@
 package com.android.textclassifier;
 
 import android.content.Context;
-import com.android.textclassifier.ModelFileManager.ModelType;
-import com.android.textclassifier.ModelFileManager.RegularFileFullMatchLister;
+import com.android.textclassifier.common.ModelFileManager;
+import com.android.textclassifier.common.ModelFileManager.RegularFileFullMatchLister;
+import com.android.textclassifier.common.ModelType;
 import com.google.common.collect.ImmutableList;
 import java.io.File;
 
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/TestModelDownloaderService.java b/java/tests/instrumentation/src/com/android/textclassifier/TestModelDownloaderService.java
deleted file mode 100644
index ddef5c1..0000000
--- a/java/tests/instrumentation/src/com/android/textclassifier/TestModelDownloaderService.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.textclassifier;
-
-import android.app.Service;
-import android.content.Intent;
-import android.os.IBinder;
-import java.util.concurrent.CountDownLatch;
-
-/** Test Service of IModelDownloaderService. */
-public final class TestModelDownloaderService extends Service {
-  public static final String GOOD_URI = "good_uri";
-  public static final String BAD_URI = "bad_uri";
-  public static final long BYTES_WRITTEN = 1L;
-  public static final String ERROR_MSG = "not good uri";
-
-  public enum DownloadResult {
-    SUCCEEDED,
-    FAILED,
-    RUNNING_FOREVER,
-    DO_NOTHING
-  }
-
-  // Obviously this does not work when considering concurrency, but probably fine for test purpose
-  private static boolean boundBefore = false;
-  private static boolean boundNow = false;
-  private static CountDownLatch onUnbindInvokedLatch = new CountDownLatch(1);
-
-  private static boolean bindSucceed = false;
-  private static DownloadResult downloadResult = DownloadResult.SUCCEEDED;
-
-  public static boolean hasEverBeenBound() {
-    return boundBefore;
-  }
-
-  public static boolean isBound() {
-    return boundNow;
-  }
-
-  public static CountDownLatch getOnUnbindInvokedLatch() {
-    return onUnbindInvokedLatch;
-  }
-
-  public static void setBindSucceed(boolean bindSucceed) {
-    TestModelDownloaderService.bindSucceed = bindSucceed;
-  }
-
-  public static void setDownloadResult(DownloadResult result) {
-    TestModelDownloaderService.downloadResult = result;
-  }
-
-  public static void reset() {
-    boundBefore = false;
-    boundNow = false;
-    onUnbindInvokedLatch = new CountDownLatch(1);
-    bindSucceed = false;
-  }
-
-  @Override
-  public IBinder onBind(Intent intent) {
-    if (bindSucceed) {
-      boundBefore = true;
-      boundNow = true;
-      return new TestModelDownloaderServiceImpl();
-    } else {
-      return null;
-    }
-  }
-
-  @Override
-  public boolean onUnbind(Intent intent) {
-    boundNow = false;
-    onUnbindInvokedLatch.countDown();
-    return false;
-  }
-
-  private static final class TestModelDownloaderServiceImpl extends IModelDownloaderService.Stub {
-    @Override
-    public void download(String uri, String unused, IModelDownloaderCallback callback) {
-      try {
-        switch (downloadResult) {
-          case SUCCEEDED:
-            callback.onSuccess(BYTES_WRITTEN);
-            break;
-          case FAILED:
-            callback.onFailure(ERROR_MSG);
-            break;
-          case RUNNING_FOREVER:
-            while (true) {}
-          case DO_NOTHING:
-            // Do nothing
-        }
-      } catch (Throwable t) {
-        // The test would timeout if failing to get the callback result
-      }
-    }
-  }
-}
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/TextClassifierImplTest.java b/java/tests/instrumentation/src/com/android/textclassifier/TextClassifierImplTest.java
index f28732d..81aa832 100644
--- a/java/tests/instrumentation/src/com/android/textclassifier/TextClassifierImplTest.java
+++ b/java/tests/instrumentation/src/com/android/textclassifier/TextClassifierImplTest.java
@@ -41,6 +41,8 @@
 import androidx.test.core.app.ApplicationProvider;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import androidx.test.filters.SmallTest;
+import com.android.textclassifier.common.ModelFileManager;
+import com.android.textclassifier.common.TextClassifierSettings;
 import com.android.textclassifier.testing.FakeContextBuilder;
 import com.google.common.collect.ImmutableList;
 import java.io.IOException;
@@ -92,7 +94,7 @@
             .setDefaultLocales(LOCALES)
             .build();
 
-    TextSelection selection = classifier.suggestSelection(request);
+    TextSelection selection = classifier.suggestSelection(null, null, request);
     assertThat(
         selection, isTextSelection(smartStartIndex, smartEndIndex, TextClassifier.TYPE_EMAIL));
   }
@@ -111,7 +113,7 @@
             .setDefaultLocales(LOCALES)
             .build();
 
-    TextSelection selection = classifier.suggestSelection(request);
+    TextSelection selection = classifier.suggestSelection(null, null, request);
     assertThat(selection, isTextSelection(smartStartIndex, smartEndIndex, TextClassifier.TYPE_URL));
   }
 
@@ -126,7 +128,7 @@
             .setDefaultLocales(LOCALES)
             .build();
 
-    TextSelection selection = classifier.suggestSelection(request);
+    TextSelection selection = classifier.suggestSelection(null, null, request);
     assertThat(selection, isTextSelection(startIndex, endIndex, NO_TYPE));
   }
 
@@ -141,7 +143,8 @@
             .setDefaultLocales(LOCALES)
             .build();
 
-    TextClassification classification = classifier.classifyText(request);
+    TextClassification classification =
+        classifier.classifyText(/* sessionId= */ null, null, request);
     assertThat(classification, isTextClassification(classifiedText, TextClassifier.TYPE_EMAIL));
   }
 
@@ -156,7 +159,7 @@
             .setDefaultLocales(LOCALES)
             .build();
 
-    TextClassification classification = classifier.classifyText(request);
+    TextClassification classification = classifier.classifyText(null, null, request);
     assertThat(classification, isTextClassification(classifiedText, TextClassifier.TYPE_URL));
     assertThat(classification, containsIntentWithAction(Intent.ACTION_VIEW));
   }
@@ -169,7 +172,7 @@
             .setDefaultLocales(LOCALES)
             .build();
 
-    TextClassification classification = classifier.classifyText(request);
+    TextClassification classification = classifier.classifyText(null, null, request);
     assertThat(classification, isTextClassification(text, TextClassifier.TYPE_ADDRESS));
   }
 
@@ -184,7 +187,7 @@
             .setDefaultLocales(LOCALES)
             .build();
 
-    TextClassification classification = classifier.classifyText(request);
+    TextClassification classification = classifier.classifyText(null, null, request);
     assertThat(classification, isTextClassification(classifiedText, TextClassifier.TYPE_URL));
     assertThat(classification, containsIntentWithAction(Intent.ACTION_VIEW));
   }
@@ -200,7 +203,7 @@
             .setDefaultLocales(LOCALES)
             .build();
 
-    TextClassification classification = classifier.classifyText(request);
+    TextClassification classification = classifier.classifyText(null, null, request);
     assertThat(classification, isTextClassification(classifiedText, TextClassifier.TYPE_DATE));
     Bundle extras = classification.getExtras();
     List<Bundle> entities = ExtrasUtils.getEntities(extras);
@@ -221,7 +224,7 @@
             .setDefaultLocales(LOCALES)
             .build();
 
-    TextClassification classification = classifier.classifyText(request);
+    TextClassification classification = classifier.classifyText(null, null, request);
     assertThat(classification, isTextClassification(classifiedText, TextClassifier.TYPE_DATE_TIME));
   }
 
@@ -235,7 +238,7 @@
             .setDefaultLocales(LOCALES)
             .build();
 
-    TextClassification classification = classifier.classifyText(request);
+    TextClassification classification = classifier.classifyText(null, null, request);
     RemoteAction translateAction = classification.getActions().get(0);
     assertEquals(1, classification.getActions().size());
     assertEquals("Translate", translateAction.getTitle().toString());
@@ -259,7 +262,7 @@
     String text = "The number is +12122537077. See you tonight!";
     TextLinks.Request request = new TextLinks.Request.Builder(text).build();
     assertThat(
-        classifier.generateLinks(request),
+        classifier.generateLinks(null, null, request),
         isTextLinksContaining(text, "+12122537077", TextClassifier.TYPE_PHONE));
   }
 
@@ -275,7 +278,7 @@
             .setDefaultLocales(LOCALES)
             .build();
     assertThat(
-        classifier.generateLinks(request),
+        classifier.generateLinks(null, null, request),
         not(isTextLinksContaining(text, "apple@banana.com", TextClassifier.TYPE_EMAIL)));
   }
 
@@ -289,7 +292,7 @@
             .setDefaultLocales(LOCALES)
             .build();
     assertThat(
-        classifier.generateLinks(request),
+        classifier.generateLinks(null, null, request),
         isTextLinksContaining(
             text, "1600 Amphitheater Parkway, Mountain View, CA", TextClassifier.TYPE_ADDRESS));
   }
@@ -306,7 +309,7 @@
             .setDefaultLocales(LOCALES)
             .build();
     assertThat(
-        classifier.generateLinks(request),
+        classifier.generateLinks(null, null, request),
         not(isTextLinksContaining(text, "apple@banana.com", TextClassifier.TYPE_EMAIL)));
   }
 
@@ -315,7 +318,7 @@
     char[] manySpaces = new char[classifier.getMaxGenerateLinksTextLength()];
     Arrays.fill(manySpaces, ' ');
     TextLinks.Request request = new TextLinks.Request.Builder(new String(manySpaces)).build();
-    TextLinks links = classifier.generateLinks(request);
+    TextLinks links = classifier.generateLinks(null, null, request);
     assertTrue(links.getLinks().isEmpty());
   }
 
@@ -325,7 +328,7 @@
     TextLinks.Request request = new TextLinks.Request.Builder(url).build();
     assertEquals(
         TextLinks.STATUS_UNSUPPORTED_CHARACTER,
-        classifier.generateLinks(request).apply(url, 0, null));
+        classifier.generateLinks(null, null, request).apply(url, 0, null));
   }
 
   @Test
@@ -333,7 +336,8 @@
     char[] manySpaces = new char[classifier.getMaxGenerateLinksTextLength() + 1];
     Arrays.fill(manySpaces, ' ');
     TextLinks.Request request = new TextLinks.Request.Builder(new String(manySpaces)).build();
-    expectThrows(IllegalArgumentException.class, () -> classifier.generateLinks(request));
+    expectThrows(
+        IllegalArgumentException.class, () -> classifier.generateLinks(null, null, request));
   }
 
   @Test
@@ -343,7 +347,7 @@
     ExtrasUtils.putIsSerializedEntityDataEnabled(extras, true);
     TextLinks.Request request = new TextLinks.Request.Builder(text).setExtras(extras).build();
 
-    TextLinks textLinks = classifier.generateLinks(request);
+    TextLinks textLinks = classifier.generateLinks(null, null, request);
 
     assertThat(textLinks.getLinks()).hasSize(1);
     TextLinks.TextLink textLink = textLinks.getLinks().iterator().next();
@@ -358,7 +362,7 @@
     String text = "The number is +12122537077.";
     TextLinks.Request request = new TextLinks.Request.Builder(text).build();
 
-    TextLinks textLinks = classifier.generateLinks(request);
+    TextLinks textLinks = classifier.generateLinks(null, null, request);
 
     assertThat(textLinks.getLinks()).hasSize(1);
     TextLinks.TextLink textLink = textLinks.getLinks().iterator().next();
@@ -370,7 +374,7 @@
   public void testDetectLanguage() throws IOException {
     String text = "This is English text";
     TextLanguage.Request request = new TextLanguage.Request.Builder(text).build();
-    TextLanguage textLanguage = classifier.detectLanguage(request);
+    TextLanguage textLanguage = classifier.detectLanguage(null, null, request);
     assertThat(textLanguage, isTextLanguage("en"));
   }
 
@@ -378,7 +382,7 @@
   public void testDetectLanguage_japanese() throws IOException {
     String text = "これは日本語のテキストです";
     TextLanguage.Request request = new TextLanguage.Request.Builder(text).build();
-    TextLanguage textLanguage = classifier.detectLanguage(request);
+    TextLanguage textLanguage = classifier.detectLanguage(null, null, request);
     assertThat(textLanguage, isTextLanguage("ja"));
   }
 
@@ -399,7 +403,8 @@
             .setTypeConfig(typeConfig)
             .build();
 
-    ConversationActions conversationActions = classifier.suggestConversationActions(request);
+    ConversationActions conversationActions =
+        classifier.suggestConversationActions(null, null, request);
     assertThat(conversationActions.getConversationActions()).hasSize(1);
     ConversationAction conversationAction = conversationActions.getConversationActions().get(0);
     assertThat(conversationAction.getType()).isEqualTo(ConversationAction.TYPE_TEXT_REPLY);
@@ -422,7 +427,8 @@
             .setTypeConfig(typeConfig)
             .build();
 
-    ConversationActions conversationActions = classifier.suggestConversationActions(request);
+    ConversationActions conversationActions =
+        classifier.suggestConversationActions(null, null, request);
     assertTrue(conversationActions.getConversationActions().size() > 1);
     for (ConversationAction conversationAction : conversationActions.getConversationActions()) {
       assertThat(conversationAction, isConversationAction(ConversationAction.TYPE_TEXT_REPLY));
@@ -446,7 +452,8 @@
             .setTypeConfig(typeConfig)
             .build();
 
-    ConversationActions conversationActions = classifier.suggestConversationActions(request);
+    ConversationActions conversationActions =
+        classifier.suggestConversationActions(null, null, request);
     assertThat(conversationActions.getConversationActions()).hasSize(1);
     ConversationAction conversationAction = conversationActions.getConversationActions().get(0);
     assertThat(conversationAction.getType()).isEqualTo(ConversationAction.TYPE_OPEN_URL);
@@ -473,7 +480,8 @@
             .setTypeConfig(typeConfig)
             .build();
 
-    ConversationActions conversationActions = classifier.suggestConversationActions(request);
+    ConversationActions conversationActions =
+        classifier.suggestConversationActions(null, null, request);
     assertThat(conversationActions.getConversationActions()).hasSize(1);
     ConversationAction conversationAction = conversationActions.getConversationActions().get(0);
     assertThat(conversationAction.getType()).isEqualTo(TYPE_COPY);
@@ -495,7 +503,8 @@
             .setMaxSuggestions(3)
             .build();
 
-    ConversationActions conversationActions = classifier.suggestConversationActions(request);
+    ConversationActions conversationActions =
+        classifier.suggestConversationActions(null, null, request);
 
     assertThat(conversationActions.getConversationActions()).isEmpty();
   }
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/ModelFileManagerTest.java b/java/tests/instrumentation/src/com/android/textclassifier/common/ModelFileManagerTest.java
similarity index 90%
rename from java/tests/instrumentation/src/com/android/textclassifier/ModelFileManagerTest.java
rename to java/tests/instrumentation/src/com/android/textclassifier/common/ModelFileManagerTest.java
index 42047d3..40838ac 100644
--- a/java/tests/instrumentation/src/com/android/textclassifier/ModelFileManagerTest.java
+++ b/java/tests/instrumentation/src/com/android/textclassifier/common/ModelFileManagerTest.java
@@ -14,20 +14,20 @@
  * limitations under the License.
  */
 
-package com.android.textclassifier;
+package com.android.textclassifier.common;
 
-import static com.android.textclassifier.ModelFileManager.ModelFile.LANGUAGE_INDEPENDENT;
+import static com.android.textclassifier.common.ModelFileManager.ModelFile.LANGUAGE_INDEPENDENT;
 import static com.google.common.truth.Truth.assertThat;
 
 import android.os.LocaleList;
 import androidx.test.core.app.ApplicationProvider;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import androidx.test.filters.SmallTest;
-import com.android.textclassifier.ModelFileManager.ModelFile;
-import com.android.textclassifier.ModelFileManager.ModelType;
-import com.android.textclassifier.ModelFileManager.ModelType.ModelTypeDef;
-import com.android.textclassifier.ModelFileManager.RegularFileFullMatchLister;
-import com.android.textclassifier.ModelFileManager.RegularFilePatternMatchLister;
+import com.android.textclassifier.TestDataUtils;
+import com.android.textclassifier.common.ModelFileManager.ModelFile;
+import com.android.textclassifier.common.ModelFileManager.RegularFileFullMatchLister;
+import com.android.textclassifier.common.ModelFileManager.RegularFilePatternMatchLister;
+import com.android.textclassifier.common.ModelType.ModelTypeDef;
 import com.android.textclassifier.common.logging.ResultIdUtils.ModelInfo;
 import com.android.textclassifier.testing.SetDefaultLocalesRule;
 import com.google.common.base.Optional;
@@ -50,13 +50,9 @@
 @RunWith(AndroidJUnit4.class)
 public final class ModelFileManagerTest {
   private static final Locale DEFAULT_LOCALE = Locale.forLanguageTag("en-US");
-  private static final String URL_SUFFIX = "q/711/en.fb";
-  private static final String URL_SUFFIX_2 = "q/712/en.fb";
 
   @ModelTypeDef private static final String MODEL_TYPE = ModelType.ANNOTATOR;
 
-  @ModelTypeDef private static final String MODEL_TYPE_2 = ModelType.LANG_ID;
-
   @Mock private TextClassifierSettings.IDeviceConfig mockDeviceConfig;
 
   @Rule public final SetDefaultLocalesRule setDefaultLocalesRule = new SetDefaultLocalesRule();
@@ -370,26 +366,6 @@
   }
 
   @Test
-  public void getDownloadTargetFile_targetFileInCorrectDir() {
-    File targetFile = modelFileManager.getDownloadTargetFile(MODEL_TYPE, URL_SUFFIX);
-    assertThat(targetFile.getAbsolutePath())
-        .startsWith(ApplicationProvider.getApplicationContext().getFilesDir().getAbsolutePath());
-  }
-
-  @Test
-  public void getDownloadTargetFile_filePathIsUnique() {
-    File targetFileOne = modelFileManager.getDownloadTargetFile(MODEL_TYPE, URL_SUFFIX);
-    File targetFileTwo = modelFileManager.getDownloadTargetFile(MODEL_TYPE, URL_SUFFIX);
-    File targetFileThree = modelFileManager.getDownloadTargetFile(MODEL_TYPE, URL_SUFFIX_2);
-    File targetFileFour = modelFileManager.getDownloadTargetFile(MODEL_TYPE_2, URL_SUFFIX);
-
-    assertThat(targetFileOne.getAbsolutePath()).isEqualTo(targetFileTwo.getAbsolutePath());
-    assertThat(targetFileOne.getAbsolutePath()).isNotEqualTo(targetFileThree.getAbsolutePath());
-    assertThat(targetFileOne.getAbsolutePath()).isNotEqualTo(targetFileFour.getAbsolutePath());
-    assertThat(targetFileThree.getAbsolutePath()).isNotEqualTo(targetFileFour.getAbsolutePath());
-  }
-
-  @Test
   public void modelFileEquals() {
     ModelFileManager.ModelFile modelA =
         new ModelFileManager.ModelFile(
@@ -501,10 +477,10 @@
     ImmutableList<ModelFile> listedModels = regularFilePatternMatchLister.list(MODEL_TYPE);
 
     assertThat(listedModels).hasSize(2);
-    assertThat(listedModels.get(0).absolutePath).isEqualTo(modelFile1.getAbsolutePath());
     assertThat(listedModels.get(0).isAsset).isFalse();
-    assertThat(listedModels.get(1).absolutePath).isEqualTo(modelFile2.getAbsolutePath());
     assertThat(listedModels.get(1).isAsset).isFalse();
+    assertThat(ImmutableList.of(listedModels.get(0).absolutePath, listedModels.get(1).absolutePath))
+        .containsExactly(modelFile1.getAbsolutePath(), modelFile2.getAbsolutePath());
   }
 
   @Test
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/TextClassifierSettingsTest.java b/java/tests/instrumentation/src/com/android/textclassifier/common/TextClassifierSettingsTest.java
similarity index 68%
rename from java/tests/instrumentation/src/com/android/textclassifier/TextClassifierSettingsTest.java
rename to java/tests/instrumentation/src/com/android/textclassifier/common/TextClassifierSettingsTest.java
index b629efd..21d6943 100644
--- a/java/tests/instrumentation/src/com/android/textclassifier/TextClassifierSettingsTest.java
+++ b/java/tests/instrumentation/src/com/android/textclassifier/common/TextClassifierSettingsTest.java
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package com.android.textclassifier;
+package com.android.textclassifier.common;
 
 import static com.google.common.truth.Truth.assertThat;
 
@@ -22,8 +22,6 @@
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import androidx.test.filters.SmallTest;
 import androidx.test.platform.app.InstrumentationRegistry;
-import com.android.textclassifier.ModelFileManager.ModelType;
-import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import java.util.HashMap;
 import java.util.Map;
@@ -98,58 +96,57 @@
   }
 
   @Test
-  public void getManifestURLSuffixSetting() {
+  public void getManifestURLSetting() {
     assertSettings(
-        "url_suffix_annotator_en",
-        "suffix:annotator",
+        "manifest_url_annotator_en",
+        "https://annotator",
         settings ->
-            assertThat(settings.getManifestURLSuffix(ModelType.ANNOTATOR, "en"))
-                .isEqualTo("suffix:annotator"));
+            assertThat(settings.getManifestURL(ModelType.ANNOTATOR, "en"))
+                .isEqualTo("https://annotator"));
     assertSettings(
-        "url_suffix_lang_id_universal",
-        "suffix:lang_id",
+        "manifest_url_lang_id_universal",
+        "https://lang_id",
         settings ->
-            assertThat(settings.getManifestURLSuffix(ModelType.LANG_ID, "universal"))
-                .isEqualTo("suffix:lang_id"));
+            assertThat(settings.getManifestURL(ModelType.LANG_ID, "universal"))
+                .isEqualTo("https://lang_id"));
     assertSettings(
-        "url_suffix_actions_suggestions_zh",
-        "suffix:actions_suggestions",
+        "manifest_url_actions_suggestions_zh",
+        "https://actions_suggestions",
         settings ->
-            assertThat(settings.getManifestURLSuffix(ModelType.ACTIONS_SUGGESTIONS, "zh"))
-                .isEqualTo("suffix:actions_suggestions"));
+            assertThat(settings.getManifestURL(ModelType.ACTIONS_SUGGESTIONS, "zh"))
+                .isEqualTo("https://actions_suggestions"));
   }
 
   @Test
-  public void getLanguageTagsForManifestURLSuffix() {
+  public void getLanguageTagsForManifestURL() {
     assertSettings(
         ImmutableMap.of(
-            "url_suffix_annotator_en", "suffix:annotator-en",
-            "url_suffix_annotator_en-us", "suffix:annotator-en-us",
-            "url_suffix_annotator_zh-hant-hk", "suffix:annotator-zh",
-            "url_suffix_lang_id_universal", "suffix:lang_id"),
+            "manifest_url_annotator_en", "https://annotator-en",
+            "manifest_url_annotator_en-us", "https://annotator-en-us",
+            "manifest_url_annotator_zh-hant-hk", "https://annotator-zh",
+            "manifest_url_lang_id_universal", "https://lang_id"),
         settings ->
-            assertThat(settings.getLanguageTagsForManifestURLSuffix(ModelType.ANNOTATOR))
+            assertThat(settings.getLanguageTagsForManifestURL(ModelType.ANNOTATOR))
                 .containsExactly("en", "en-us", "zh-hant-hk"));
 
     assertSettings(
         ImmutableMap.of(
-            "url_suffix_annotator_en", "suffix:annotator-en",
-            "url_suffix_annotator_en-us", "suffix:annotator-en-us",
-            "url_suffix_annotator_zh-hant-hk", "suffix:annotator-zh",
-            "url_suffix_lang_id_universal", "suffix:lang_id"),
+            "manifest_url_annotator_en", "https://annotator-en",
+            "manifest_url_annotator_en-us", "https://annotator-en-us",
+            "manifest_url_annotator_zh-hant-hk", "https://annotator-zh",
+            "manifest_url_lang_id_universal", "https://lang_id"),
         settings ->
-            assertThat(settings.getLanguageTagsForManifestURLSuffix(ModelType.LANG_ID))
-                .containsExactlyElementsIn(
-                    ImmutableList.of(ModelDownloadManager.UNIVERSAL_MODEL_LANGUAGE_TAG)));
+            assertThat(settings.getLanguageTagsForManifestURL(ModelType.LANG_ID))
+                .containsExactly("universal"));
 
     assertSettings(
         ImmutableMap.of(
-            "url_suffix_annotator_en", "suffix:annotator-en",
-            "url_suffix_annotator_en-us", "suffix:annotator-en-us",
-            "url_suffix_annotator_zh-hant-hk", "suffix:annotator-zh",
-            "url_suffix_lang_id_universal", "suffix:lang_id"),
+            "manifest_url_annotator_en", "https://annotator-en",
+            "manifest_url_annotator_en-us", "https://annotator-en-us",
+            "manifest_url_annotator_zh-hant-hk", "https://annotator-zh",
+            "manifest_url_lang_id_universal", "https://lang_id"),
         settings ->
-            assertThat(settings.getLanguageTagsForManifestURLSuffix(ModelType.ACTIONS_SUGGESTIONS))
+            assertThat(settings.getLanguageTagsForManifestURL(ModelType.ACTIONS_SUGGESTIONS))
                 .isEmpty());
   }
 
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/common/statsd/GenerateLinksLoggerTest.java b/java/tests/instrumentation/src/com/android/textclassifier/common/statsd/GenerateLinksLoggerTest.java
index 6c66dd5..e215b15 100644
--- a/java/tests/instrumentation/src/com/android/textclassifier/common/statsd/GenerateLinksLoggerTest.java
+++ b/java/tests/instrumentation/src/com/android/textclassifier/common/statsd/GenerateLinksLoggerTest.java
@@ -18,8 +18,12 @@
 
 import static com.google.common.truth.Truth.assertThat;
 
+import android.os.Binder;
+import android.os.Parcel;
 import android.stats.textclassifier.EventType;
 import android.stats.textclassifier.WidgetType;
+import android.view.textclassifier.TextClassificationContext;
+import android.view.textclassifier.TextClassificationSessionId;
 import android.view.textclassifier.TextClassifier;
 import android.view.textclassifier.TextLinks;
 import androidx.test.core.app.ApplicationProvider;
@@ -55,6 +59,11 @@
       new ModelInfo(1, ImmutableList.of(Locale.ENGLISH));
   private static final ModelInfo LANGID_MODEL =
       new ModelInfo(2, ImmutableList.of(Locale.forLanguageTag("*")));
+  private static final String SESSION_ID = "123456";
+  private static final String WIDGET_TYPE = TextClassifier.WIDGET_TYPE_WEBVIEW;
+  private static final WidgetType WIDGET_TYPE_ENUM = WidgetType.WIDGET_TYPE_WEBVIEW;
+  private final TextClassificationContext textClassificationContext =
+      new TextClassificationContext.Builder(PACKAGE_NAME, WIDGET_TYPE).build();
 
   @Before
   public void setup() throws Exception {
@@ -83,11 +92,11 @@
         new TextLinks.Builder(testText)
             .addLink(phoneOffset, phoneOffset + phoneText.length(), phoneEntityScores)
             .build();
-    String uuid = "uuid";
 
-    GenerateLinksLogger generateLinksLogger =
-        new GenerateLinksLogger(/* sampleRate= */ 1, () -> uuid);
+    GenerateLinksLogger generateLinksLogger = new GenerateLinksLogger(/* sampleRate= */ 1);
     generateLinksLogger.logGenerateLinks(
+        createTextClassificationSessionId(),
+        textClassificationContext,
         testText,
         links,
         PACKAGE_NAME,
@@ -103,10 +112,10 @@
     assertThat(loggedEvents).hasSize(2);
     TextLinkifyEvent summaryEvent =
         AtomsProto.TextLinkifyEvent.newBuilder()
-            .setSessionId(uuid)
+            .setSessionId(SESSION_ID)
             .setEventIndex(0)
             .setModelName("en_v1")
-            .setWidgetType(WidgetType.WIDGET_TYPE_UNKNOWN)
+            .setWidgetType(WIDGET_TYPE_ENUM)
             .setEventType(EventType.LINKS_GENERATED)
             .setPackageName(PACKAGE_NAME)
             .setEntityType("")
@@ -118,10 +127,10 @@
             .build();
     TextLinkifyEvent phoneEvent =
         AtomsProto.TextLinkifyEvent.newBuilder()
-            .setSessionId(uuid)
+            .setSessionId(SESSION_ID)
             .setEventIndex(0)
             .setModelName("en_v1")
-            .setWidgetType(WidgetType.WIDGET_TYPE_UNKNOWN)
+            .setWidgetType(WIDGET_TYPE_ENUM)
             .setEventType(EventType.LINKS_GENERATED)
             .setPackageName(PACKAGE_NAME)
             .setEntityType(TextClassifier.TYPE_PHONE)
@@ -148,11 +157,11 @@
             .addLink(phoneOffset, phoneOffset + phoneText.length(), phoneEntityScores)
             .addLink(addressOffset, addressOffset + addressText.length(), addressEntityScores)
             .build();
-    String uuid = "uuid";
 
-    GenerateLinksLogger generateLinksLogger =
-        new GenerateLinksLogger(/* sampleRate= */ 1, () -> uuid);
+    GenerateLinksLogger generateLinksLogger = new GenerateLinksLogger(/* sampleRate= */ 1);
     generateLinksLogger.logGenerateLinks(
+        createTextClassificationSessionId(),
+        textClassificationContext,
         testText,
         links,
         PACKAGE_NAME,
@@ -182,4 +191,13 @@
     assertThat(phoneEvent.getNumLinks()).isEqualTo(1);
     assertThat(phoneEvent.getLinkedTextLength()).isEqualTo(phoneText.length());
   }
+
+  private static TextClassificationSessionId createTextClassificationSessionId() {
+    // A hack to create TextClassificationSessionId because its constructor is @hide.
+    Parcel parcel = Parcel.obtain();
+    parcel.writeString(SESSION_ID);
+    parcel.writeStrongBinder(new Binder());
+    parcel.setDataPosition(0);
+    return TextClassificationSessionId.CREATOR.createFromParcel(parcel);
+  }
 }
diff --git a/java/tests/instrumentation/src/com/android/textclassifier/testing/TestingDeviceConfig.java b/java/tests/instrumentation/src/com/android/textclassifier/testing/TestingDeviceConfig.java
new file mode 100644
index 0000000..670e3d0
--- /dev/null
+++ b/java/tests/instrumentation/src/com/android/textclassifier/testing/TestingDeviceConfig.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.textclassifier.testing;
+
+import android.provider.DeviceConfig.Properties;
+import androidx.annotation.NonNull;
+import com.android.textclassifier.common.TextClassifierSettings;
+import java.util.HashMap;
+import javax.annotation.Nullable;
+
+/** A fake DeviceConfig implementation for testing purposes. */
+public final class TestingDeviceConfig implements TextClassifierSettings.IDeviceConfig {
+
+  private final HashMap<String, String> strConfigs;
+  private final HashMap<String, Boolean> boolConfigs;
+
+  public TestingDeviceConfig() {
+    this.strConfigs = new HashMap<>();
+    this.boolConfigs = new HashMap<>();
+  }
+
+  public void setConfig(String key, String value) {
+    strConfigs.put(key, value);
+  }
+
+  public void setConfig(String key, boolean value) {
+    boolConfigs.put(key, value);
+  }
+
+  @Override
+  public Properties getProperties(@NonNull String namespace, @NonNull String... names) {
+    Properties.Builder builder = new Properties.Builder(namespace);
+    for (String key : strConfigs.keySet()) {
+      builder.setString(key, strConfigs.get(key));
+    }
+    for (String key : boolConfigs.keySet()) {
+      builder.setBoolean(key, boolConfigs.get(key));
+    }
+    return builder.build();
+  }
+
+  @Override
+  public boolean getBoolean(@NonNull String namespace, @NonNull String name, boolean defaultValue) {
+    return boolConfigs.containsKey(name) ? boolConfigs.get(name) : defaultValue;
+  }
+
+  @Override
+  public String getString(
+      @NonNull String namespace, @NonNull String name, @Nullable String defaultValue) {
+    return strConfigs.containsKey(name) ? strConfigs.get(name) : defaultValue;
+  }
+}
diff --git a/jni/com/google/android/textclassifier/ActionsSuggestionsModel.java b/jni/com/google/android/textclassifier/ActionsSuggestionsModel.java
index 0ddb01c..b5c8ab6 100644
--- a/jni/com/google/android/textclassifier/ActionsSuggestionsModel.java
+++ b/jni/com/google/android/textclassifier/ActionsSuggestionsModel.java
@@ -87,7 +87,7 @@
   }
 
   /** Suggests actions / replies to the given conversation. */
-  public ActionSuggestion[] suggestActions(
+  public ActionSuggestions suggestActions(
       Conversation conversation, ActionSuggestionOptions options, AnnotatorModel annotator) {
     return nativeSuggestActions(
         actionsModelPtr,
@@ -99,7 +99,7 @@
         /* generateAndroidIntents= */ false);
   }
 
-  public ActionSuggestion[] suggestActionsWithIntents(
+  public ActionSuggestions suggestActionsWithIntents(
       Conversation conversation,
       ActionSuggestionOptions options,
       Object appContext,
@@ -172,6 +172,26 @@
         assetFileDescriptor.getLength());
   }
 
+  /** Initializes conversation intent detection, passing the given serialized config to it. */
+  public void initializeConversationIntentDetection(byte[] serializedConfig) {
+    if (!nativeInitializeConversationIntentDetection(actionsModelPtr, serializedConfig)) {
+      throw new IllegalArgumentException("Couldn't initialize conversation intent detection");
+    }
+  }
+
+  /** Represents a list of suggested actions of a given conversation. */
+  public static final class ActionSuggestions {
+    /** A list of suggested actions, sorted by score in descending order. */
+    public final ActionSuggestion[] actionSuggestions;
+    /** Whether the input conversation is considered as sensitive. */
+    public final boolean isSensitive;
+
+    public ActionSuggestions(ActionSuggestion[] actionSuggestions, boolean isSensitive) {
+      this.actionSuggestions = actionSuggestions;
+      this.isSensitive = isSensitive;
+    }
+  }
+
   /** Action suggestion that contains a response text and the type of the response. */
   public static final class ActionSuggestion {
     @Nullable private final String responseText;
@@ -180,6 +200,7 @@
     @Nullable private final NamedVariant[] entityData;
     @Nullable private final byte[] serializedEntityData;
     @Nullable private final RemoteActionTemplate[] remoteActionTemplates;
+    @Nullable private final Slot[] slots;
 
     public ActionSuggestion(
         @Nullable String responseText,
@@ -187,13 +208,15 @@
         float score,
         @Nullable NamedVariant[] entityData,
         @Nullable byte[] serializedEntityData,
-        @Nullable RemoteActionTemplate[] remoteActionTemplates) {
+        @Nullable RemoteActionTemplate[] remoteActionTemplates,
+        @Nullable Slot[] slots) {
       this.responseText = responseText;
       this.actionType = actionType;
       this.score = score;
       this.entityData = entityData;
       this.serializedEntityData = serializedEntityData;
       this.remoteActionTemplates = remoteActionTemplates;
+      this.slots = slots;
     }
 
     @Nullable
@@ -224,6 +247,11 @@
     public RemoteActionTemplate[] getRemoteActionTemplates() {
       return remoteActionTemplates;
     }
+
+    @Nullable
+    public Slot[] getSlots() {
+      return slots;
+    }
   }
 
   /** Represents a single message in the conversation. */
@@ -295,6 +323,25 @@
     public ActionSuggestionOptions() {}
   }
 
+  /** Represents a slot for an {@link ActionSuggestion}. */
+  public static final class Slot {
+
+    public final String type;
+    public final int messageIndex;
+    public final int startIndex;
+    public final int endIndex;
+    public final float confidenceScore;
+
+    public Slot(
+        String type, int messageIndex, int startIndex, int endIndex, float confidenceScore) {
+      this.type = type;
+      this.messageIndex = messageIndex;
+      this.startIndex = startIndex;
+      this.endIndex = endIndex;
+      this.confidenceScore = confidenceScore;
+    }
+  }
+
   /**
    * Retrieves the pointer to the native object. Note: Need to keep the {@code
    * ActionsSuggestionsModel} alive as long as the pointer is used.
@@ -311,6 +358,9 @@
   private static native long nativeNewActionsModelWithOffset(
       int fd, long offset, long size, byte[] preconditionsOverwrite);
 
+  private native boolean nativeInitializeConversationIntentDetection(
+      long actionsModelPtr, byte[] serializedConfig);
+
   private static native String nativeGetLocales(int fd);
 
   private static native String nativeGetLocalesWithOffset(int fd, long offset, long size);
@@ -323,7 +373,7 @@
 
   private static native String nativeGetNameWithOffset(int fd, long offset, long size);
 
-  private native ActionSuggestion[] nativeSuggestActions(
+  private native ActionSuggestions nativeSuggestActions(
       long context,
       Conversation conversation,
       ActionSuggestionOptions options,
diff --git a/native/Android.bp b/native/Android.bp
index edcc97d..32a58c3 100644
--- a/native/Android.bp
+++ b/native/Android.bp
@@ -100,7 +100,8 @@
         "-DTC3_UNILIB_JAVAICU",
         "-DTC3_CALENDAR_JAVAICU",
         "-DTC3_AOSP",
-        "-DTC3_VOCAB_ANNOTATOR_DUMMY",
+        "-DTC3_VOCAB_ANNOTATOR_IMPL",
+        "-DTC3_POD_NER_ANNOTATOR_IMPL",
     ],
 
     product_variables: {
@@ -128,6 +129,7 @@
         "liblua",
         "libutf",
         "libtflite_static",
+        "tflite_support"
     ],
 }
 
@@ -192,7 +194,7 @@
     name: "libtextclassifier_tests",
     defaults: ["libtextclassifier_defaults"],
 
-    test_suites: ["device-tests", "mts-extservices"],
+    test_suites: ["general-tests", "mts-extservices"],
 
     data: [
         "**/test_data/*",
@@ -239,6 +241,9 @@
         "libbase_ndk",
         "libtextclassifier",
     ],
+    header_libs: [
+        "libtextclassifier_flatbuffer_testonly_headers",
+    ],
 }
 
 android_test {
@@ -246,7 +251,7 @@
     srcs: ["testing/JvmTestLauncher.java"],
     min_sdk_version: "30",
     test_suites: [
-        "device-tests",
+        "general-tests",
         "mts-extservices",
     ],
     static_libs: [
diff --git a/native/AndroidTest.xml b/native/AndroidTest.xml
index b3c012b..11893f5 100644
--- a/native/AndroidTest.xml
+++ b/native/AndroidTest.xml
@@ -14,6 +14,7 @@
      limitations under the License.
 -->
 <configuration description="Config for libtextclassifier_tests">
+    <option name="config-descriptor:metadata" key="mainline-param" value="com.google.android.extservices.apex" />
     <option name="test-suite-tag" value="apct" />
     <option name="test-suite-tag" value="mts" />
 
diff --git a/native/FlatBufferHeaders.bp b/native/FlatBufferHeaders.bp
index 4212bbd..950eee6 100644
--- a/native/FlatBufferHeaders.bp
+++ b/native/FlatBufferHeaders.bp
@@ -15,20 +15,6 @@
 //
 
 genrule {
-    name: "libtextclassifier_fbgen_lang_id_common_flatbuffers_model",
-    srcs: ["lang_id/common/flatbuffers/model.fbs"],
-    out: ["lang_id/common/flatbuffers/model_generated.h"],
-    defaults: ["fbgen"],
-}
-
-genrule {
-    name: "libtextclassifier_fbgen_lang_id_common_flatbuffers_embedding-network",
-    srcs: ["lang_id/common/flatbuffers/embedding-network.fbs"],
-    out: ["lang_id/common/flatbuffers/embedding-network_generated.h"],
-    defaults: ["fbgen"],
-}
-
-genrule {
     name: "libtextclassifier_fbgen_actions_actions_model",
     srcs: ["actions/actions_model.fbs"],
     out: ["actions/actions_model_generated.h"],
@@ -43,9 +29,16 @@
 }
 
 genrule {
-    name: "libtextclassifier_fbgen_annotator_model",
-    srcs: ["annotator/model.fbs"],
-    out: ["annotator/model_generated.h"],
+    name: "libtextclassifier_fbgen_lang_id_common_flatbuffers_embedding-network",
+    srcs: ["lang_id/common/flatbuffers/embedding-network.fbs"],
+    out: ["lang_id/common/flatbuffers/embedding-network_generated.h"],
+    defaults: ["fbgen"],
+}
+
+genrule {
+    name: "libtextclassifier_fbgen_lang_id_common_flatbuffers_model",
+    srcs: ["lang_id/common/flatbuffers/model.fbs"],
+    out: ["lang_id/common/flatbuffers/model_generated.h"],
     defaults: ["fbgen"],
 }
 
@@ -57,6 +50,13 @@
 }
 
 genrule {
+    name: "libtextclassifier_fbgen_annotator_datetime_datetime",
+    srcs: ["annotator/datetime/datetime.fbs"],
+    out: ["annotator/datetime/datetime_generated.h"],
+    defaults: ["fbgen"],
+}
+
+genrule {
     name: "libtextclassifier_fbgen_annotator_experimental_experimental",
     srcs: ["annotator/experimental/experimental.fbs"],
     out: ["annotator/experimental/experimental_generated.h"],
@@ -71,9 +71,58 @@
 }
 
 genrule {
-    name: "libtextclassifier_fbgen_utils_grammar_testing_value",
-    srcs: ["utils/grammar/testing/value.fbs"],
-    out: ["utils/grammar/testing/value_generated.h"],
+    name: "libtextclassifier_fbgen_annotator_model",
+    srcs: ["annotator/model.fbs"],
+    out: ["annotator/model_generated.h"],
+    defaults: ["fbgen"],
+}
+
+genrule {
+    name: "libtextclassifier_fbgen_utils_flatbuffers_flatbuffers",
+    srcs: ["utils/flatbuffers/flatbuffers.fbs"],
+    out: ["utils/flatbuffers/flatbuffers_generated.h"],
+    defaults: ["fbgen"],
+}
+
+genrule {
+    name: "libtextclassifier_fbgen_utils_tflite_text_encoder_config",
+    srcs: ["utils/tflite/text_encoder_config.fbs"],
+    out: ["utils/tflite/text_encoder_config_generated.h"],
+    defaults: ["fbgen"],
+}
+
+genrule {
+    name: "libtextclassifier_fbgen_utils_resources",
+    srcs: ["utils/resources.fbs"],
+    out: ["utils/resources_generated.h"],
+    defaults: ["fbgen"],
+}
+
+genrule {
+    name: "libtextclassifier_fbgen_utils_zlib_buffer",
+    srcs: ["utils/zlib/buffer.fbs"],
+    out: ["utils/zlib/buffer_generated.h"],
+    defaults: ["fbgen"],
+}
+
+genrule {
+    name: "libtextclassifier_fbgen_utils_container_bit-vector",
+    srcs: ["utils/container/bit-vector.fbs"],
+    out: ["utils/container/bit-vector_generated.h"],
+    defaults: ["fbgen"],
+}
+
+genrule {
+    name: "libtextclassifier_fbgen_utils_intents_intent-config",
+    srcs: ["utils/intents/intent-config.fbs"],
+    out: ["utils/intents/intent-config_generated.h"],
+    defaults: ["fbgen"],
+}
+
+genrule {
+    name: "libtextclassifier_fbgen_utils_normalization",
+    srcs: ["utils/normalization.fbs"],
+    out: ["utils/normalization_generated.h"],
     defaults: ["fbgen"],
 }
 
@@ -92,51 +141,9 @@
 }
 
 genrule {
-    name: "libtextclassifier_fbgen_utils_normalization",
-    srcs: ["utils/normalization.fbs"],
-    out: ["utils/normalization_generated.h"],
-    defaults: ["fbgen"],
-}
-
-genrule {
-    name: "libtextclassifier_fbgen_utils_resources",
-    srcs: ["utils/resources.fbs"],
-    out: ["utils/resources_generated.h"],
-    defaults: ["fbgen"],
-}
-
-genrule {
-    name: "libtextclassifier_fbgen_utils_i18n_language-tag",
-    srcs: ["utils/i18n/language-tag.fbs"],
-    out: ["utils/i18n/language-tag_generated.h"],
-    defaults: ["fbgen"],
-}
-
-genrule {
-    name: "libtextclassifier_fbgen_utils_tflite_text_encoder_config",
-    srcs: ["utils/tflite/text_encoder_config.fbs"],
-    out: ["utils/tflite/text_encoder_config_generated.h"],
-    defaults: ["fbgen"],
-}
-
-genrule {
-    name: "libtextclassifier_fbgen_utils_flatbuffers_flatbuffers",
-    srcs: ["utils/flatbuffers/flatbuffers.fbs"],
-    out: ["utils/flatbuffers/flatbuffers_generated.h"],
-    defaults: ["fbgen"],
-}
-
-genrule {
-    name: "libtextclassifier_fbgen_utils_container_bit-vector",
-    srcs: ["utils/container/bit-vector.fbs"],
-    out: ["utils/container/bit-vector_generated.h"],
-    defaults: ["fbgen"],
-}
-
-genrule {
-    name: "libtextclassifier_fbgen_utils_tokenizer",
-    srcs: ["utils/tokenizer.fbs"],
-    out: ["utils/tokenizer_generated.h"],
+    name: "libtextclassifier_fbgen_utils_grammar_testing_value",
+    srcs: ["utils/grammar/testing/value.fbs"],
+    out: ["utils/grammar/testing/value_generated.h"],
     defaults: ["fbgen"],
 }
 
@@ -148,16 +155,16 @@
 }
 
 genrule {
-    name: "libtextclassifier_fbgen_utils_zlib_buffer",
-    srcs: ["utils/zlib/buffer.fbs"],
-    out: ["utils/zlib/buffer_generated.h"],
+    name: "libtextclassifier_fbgen_utils_tokenizer",
+    srcs: ["utils/tokenizer.fbs"],
+    out: ["utils/tokenizer_generated.h"],
     defaults: ["fbgen"],
 }
 
 genrule {
-    name: "libtextclassifier_fbgen_utils_intents_intent-config",
-    srcs: ["utils/intents/intent-config.fbs"],
-    out: ["utils/intents/intent-config_generated.h"],
+    name: "libtextclassifier_fbgen_utils_i18n_language-tag",
+    srcs: ["utils/i18n/language-tag.fbs"],
+    out: ["utils/i18n/language-tag_generated.h"],
     defaults: ["fbgen"],
 }
 
@@ -171,49 +178,66 @@
         "com.android.extservices",
     ],
     generated_headers: [
-        "libtextclassifier_fbgen_lang_id_common_flatbuffers_model",
-        "libtextclassifier_fbgen_lang_id_common_flatbuffers_embedding-network",
         "libtextclassifier_fbgen_actions_actions_model",
         "libtextclassifier_fbgen_actions_actions-entity-data",
-        "libtextclassifier_fbgen_annotator_model",
+        "libtextclassifier_fbgen_lang_id_common_flatbuffers_embedding-network",
+        "libtextclassifier_fbgen_lang_id_common_flatbuffers_model",
         "libtextclassifier_fbgen_annotator_person_name_person_name_model",
+        "libtextclassifier_fbgen_annotator_datetime_datetime",
         "libtextclassifier_fbgen_annotator_experimental_experimental",
         "libtextclassifier_fbgen_annotator_entity-data",
-        "libtextclassifier_fbgen_utils_grammar_testing_value",
+        "libtextclassifier_fbgen_annotator_model",
+        "libtextclassifier_fbgen_utils_flatbuffers_flatbuffers",
+        "libtextclassifier_fbgen_utils_tflite_text_encoder_config",
+        "libtextclassifier_fbgen_utils_resources",
+        "libtextclassifier_fbgen_utils_zlib_buffer",
+        "libtextclassifier_fbgen_utils_container_bit-vector",
+        "libtextclassifier_fbgen_utils_intents_intent-config",
+        "libtextclassifier_fbgen_utils_normalization",
         "libtextclassifier_fbgen_utils_grammar_semantics_expression",
         "libtextclassifier_fbgen_utils_grammar_rules",
-        "libtextclassifier_fbgen_utils_normalization",
-        "libtextclassifier_fbgen_utils_resources",
-        "libtextclassifier_fbgen_utils_i18n_language-tag",
-        "libtextclassifier_fbgen_utils_tflite_text_encoder_config",
-        "libtextclassifier_fbgen_utils_flatbuffers_flatbuffers",
-        "libtextclassifier_fbgen_utils_container_bit-vector",
-        "libtextclassifier_fbgen_utils_tokenizer",
         "libtextclassifier_fbgen_utils_codepoint-range",
-        "libtextclassifier_fbgen_utils_zlib_buffer",
-        "libtextclassifier_fbgen_utils_intents_intent-config",
+        "libtextclassifier_fbgen_utils_tokenizer",
+        "libtextclassifier_fbgen_utils_i18n_language-tag",
     ],
     export_generated_headers: [
-        "libtextclassifier_fbgen_lang_id_common_flatbuffers_model",
-        "libtextclassifier_fbgen_lang_id_common_flatbuffers_embedding-network",
         "libtextclassifier_fbgen_actions_actions_model",
         "libtextclassifier_fbgen_actions_actions-entity-data",
-        "libtextclassifier_fbgen_annotator_model",
+        "libtextclassifier_fbgen_lang_id_common_flatbuffers_embedding-network",
+        "libtextclassifier_fbgen_lang_id_common_flatbuffers_model",
         "libtextclassifier_fbgen_annotator_person_name_person_name_model",
+        "libtextclassifier_fbgen_annotator_datetime_datetime",
         "libtextclassifier_fbgen_annotator_experimental_experimental",
         "libtextclassifier_fbgen_annotator_entity-data",
-        "libtextclassifier_fbgen_utils_grammar_testing_value",
+        "libtextclassifier_fbgen_annotator_model",
+        "libtextclassifier_fbgen_utils_flatbuffers_flatbuffers",
+        "libtextclassifier_fbgen_utils_tflite_text_encoder_config",
+        "libtextclassifier_fbgen_utils_resources",
+        "libtextclassifier_fbgen_utils_zlib_buffer",
+        "libtextclassifier_fbgen_utils_container_bit-vector",
+        "libtextclassifier_fbgen_utils_intents_intent-config",
+        "libtextclassifier_fbgen_utils_normalization",
         "libtextclassifier_fbgen_utils_grammar_semantics_expression",
         "libtextclassifier_fbgen_utils_grammar_rules",
-        "libtextclassifier_fbgen_utils_normalization",
-        "libtextclassifier_fbgen_utils_resources",
-        "libtextclassifier_fbgen_utils_i18n_language-tag",
-        "libtextclassifier_fbgen_utils_tflite_text_encoder_config",
-        "libtextclassifier_fbgen_utils_flatbuffers_flatbuffers",
-        "libtextclassifier_fbgen_utils_container_bit-vector",
-        "libtextclassifier_fbgen_utils_tokenizer",
         "libtextclassifier_fbgen_utils_codepoint-range",
-        "libtextclassifier_fbgen_utils_zlib_buffer",
-        "libtextclassifier_fbgen_utils_intents_intent-config",
+        "libtextclassifier_fbgen_utils_tokenizer",
+        "libtextclassifier_fbgen_utils_i18n_language-tag",
+    ],
+}
+
+cc_library_headers {
+    name: "libtextclassifier_flatbuffer_testonly_headers",
+    stl: "libc++_static",
+    sdk_version: "current",
+    min_sdk_version: "30",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.extservices",
+    ],
+    generated_headers: [
+        "libtextclassifier_fbgen_utils_grammar_testing_value",
+    ],
+    export_generated_headers: [
+        "libtextclassifier_fbgen_utils_grammar_testing_value",
     ],
 }
diff --git a/native/JavaTests.bp b/native/JavaTests.bp
index 1c5099d..78d5748 100644
--- a/native/JavaTests.bp
+++ b/native/JavaTests.bp
@@ -17,27 +17,30 @@
 filegroup {
     name: "libtextclassifier_java_test_sources",
     srcs: [
-        "actions/actions-suggestions_test.cc",
         "actions/grammar-actions_test.cc",
+        "actions/actions-suggestions_test.cc",
+        "annotator/pod_ner/pod-ner-impl_test.cc",
         "annotator/datetime/regex-parser_test.cc",
-        "utils/grammar/parsing/lexer_test.cc",
-        "utils/regex-match_test.cc",
-        "utils/calendar/calendar_test.cc",
+        "annotator/datetime/grammar-parser_test.cc",
+        "annotator/datetime/datetime-grounder_test.cc",
         "utils/intents/intent-generator-test-lib.cc",
-        "annotator/grammar/grammar-annotator_test.cc",
-        "annotator/grammar/test-utils.cc",
+        "utils/calendar/calendar_test.cc",
+        "utils/regex-match_test.cc",
+        "utils/grammar/parsing/lexer_test.cc",
         "annotator/number/number_test-include.cc",
         "annotator/annotator_test-include.cc",
+        "annotator/grammar/grammar-annotator_test.cc",
+        "annotator/grammar/test-utils.cc",
         "utils/utf8/unilib_test-include.cc",
-        "utils/grammar/parsing/parser_test.cc",
         "utils/grammar/analyzer_test.cc",
         "utils/grammar/semantics/composer_test.cc",
-        "utils/grammar/semantics/evaluators/merge-values-eval_test.cc",
-        "utils/grammar/semantics/evaluators/constituent-eval_test.cc",
-        "utils/grammar/semantics/evaluators/parse-number-eval_test.cc",
         "utils/grammar/semantics/evaluators/arithmetic-eval_test.cc",
-        "utils/grammar/semantics/evaluators/span-eval_test.cc",
+        "utils/grammar/semantics/evaluators/merge-values-eval_test.cc",
         "utils/grammar/semantics/evaluators/const-eval_test.cc",
         "utils/grammar/semantics/evaluators/compose-eval_test.cc",
+        "utils/grammar/semantics/evaluators/span-eval_test.cc",
+        "utils/grammar/semantics/evaluators/parse-number-eval_test.cc",
+        "utils/grammar/semantics/evaluators/constituent-eval_test.cc",
+        "utils/grammar/parsing/parser_test.cc",
     ],
 }
diff --git a/native/actions/actions-entity-data.fbs b/native/actions/actions-entity-data.fbs
old mode 100755
new mode 100644
diff --git a/native/actions/actions-suggestions.cc b/native/actions/actions-suggestions.cc
index 69235d7..b1a042c 100644
--- a/native/actions/actions-suggestions.cc
+++ b/native/actions/actions-suggestions.cc
@@ -17,10 +17,15 @@
 #include "actions/actions-suggestions.h"
 
 #include <memory>
+#include <vector>
+
+#include "utils/base/statusor.h"
 
 #if !defined(TC3_DISABLE_LUA)
 #include "actions/lua-actions.h"
 #endif
+#include "actions/ngram-model.h"
+#include "actions/tflite-sensitive-model.h"
 #include "actions/types.h"
 #include "actions/utils.h"
 #include "actions/zlib-utils.h"
@@ -86,6 +91,22 @@
     return result;
   }
 }
+
+template <typename T>
+void SetVectorOrScalarAsModelInput(
+    const int param_index, const Variant& param_value,
+    tflite::Interpreter* interpreter,
+    const std::unique_ptr<const TfLiteModelExecutor>& model_executor) {
+  if (param_value.Has<std::vector<T>>()) {
+    model_executor->SetInput<T>(
+        param_index, param_value.ConstRefValue<std::vector<T>>(), interpreter);
+  } else if (param_value.Has<T>()) {
+    model_executor->SetInput<float>(param_index, param_value.Value<T>(),
+                                    interpreter);
+  } else {
+    TC3_LOG(ERROR) << "Variant type error!";
+  }
+}
 }  // namespace
 
 std::unique_ptr<ActionsSuggestions> ActionsSuggestions::FromUnownedBuffer(
@@ -369,15 +390,23 @@
 
   // Create low confidence model if specified.
   if (model_->low_confidence_ngram_model() != nullptr) {
-    ngram_model_ = NGramModel::Create(
+    sensitive_model_ = NGramSensitiveModel::Create(
         unilib_, model_->low_confidence_ngram_model(),
         feature_processor_ == nullptr ? nullptr
                                       : feature_processor_->tokenizer());
-    if (ngram_model_ == nullptr) {
+    if (sensitive_model_ == nullptr) {
       TC3_LOG(ERROR) << "Could not create ngram linear regression model.";
       return false;
     }
   }
+  if (model_->low_confidence_tflite_model() != nullptr) {
+    sensitive_model_ =
+        TFLiteSensitiveModel::Create(model_->low_confidence_tflite_model());
+    if (sensitive_model_ == nullptr) {
+      TC3_LOG(ERROR) << "Could not create TFLite sensitive model.";
+      return false;
+    }
+  }
 
   return true;
 }
@@ -726,16 +755,24 @@
       const bool has_value = param_value_it != model_parameters.end();
       switch (param_type) {
         case kTfLiteFloat32:
-          model_executor_->SetInput<float>(
-              param_index,
-              has_value ? param_value_it->second.Value<float>() : kDefaultFloat,
-              interpreter);
+          if (has_value) {
+            SetVectorOrScalarAsModelInput<float>(param_index,
+                                                 param_value_it->second,
+                                                 interpreter, model_executor_);
+          } else {
+            model_executor_->SetInput<float>(param_index, kDefaultFloat,
+                                             interpreter);
+          }
           break;
         case kTfLiteInt32:
-          model_executor_->SetInput<int32_t>(
-              param_index,
-              has_value ? param_value_it->second.Value<int>() : kDefaultInt,
-              interpreter);
+          if (has_value) {
+            SetVectorOrScalarAsModelInput<int32_t>(
+                param_index, param_value_it->second, interpreter,
+                model_executor_);
+          } else {
+            model_executor_->SetInput<int32_t>(param_index, kDefaultInt,
+                                               interpreter);
+          }
           break;
         case kTfLiteInt64:
           model_executor_->SetInput<int64_t>(
@@ -860,13 +897,12 @@
       return false;
     }
     response->sensitivity_score = sensitive_topic_score.data()[0];
-    response->output_filtered_sensitivity =
-        (response->sensitivity_score >
-         preconditions_.max_sensitive_topic_score);
+    response->is_sensitive = (response->sensitivity_score >
+                              preconditions_.max_sensitive_topic_score);
   }
 
   // Suppress model outputs.
-  if (response->output_filtered_sensitivity) {
+  if (response->is_sensitive) {
     return true;
   }
 
@@ -948,6 +984,12 @@
     std::unique_ptr<tflite::Interpreter>* interpreter) const {
   TC3_CHECK_LE(num_messages, conversation.messages.size());
 
+  if (sensitive_model_ != nullptr &&
+      sensitive_model_->EvalConversation(conversation, num_messages).first) {
+    response->is_sensitive = true;
+    return true;
+  }
+
   if (!model_executor_) {
     return true;
   }
@@ -1003,6 +1045,18 @@
   return ReadModelOutput(interpreter->get(), options, response);
 }
 
+Status ActionsSuggestions::SuggestActionsFromConversationIntentDetection(
+    const Conversation& conversation, const ActionSuggestionOptions& options,
+    std::vector<ActionSuggestion>* actions) const {
+  TC3_ASSIGN_OR_RETURN(
+      std::vector<ActionSuggestion> new_actions,
+      conversation_intent_detection_->SuggestActions(conversation, options));
+  for (auto& action : new_actions) {
+    actions->push_back(std::move(action));
+  }
+  return Status::OK;
+}
+
 AnnotationOptions ActionsSuggestions::AnnotationOptionsForMessage(
     const ConversationMessage& message) const {
   AnnotationOptions options;
@@ -1308,10 +1362,7 @@
 
   std::vector<const UniLib::RegexPattern*> post_check_rules;
   if (preconditions_.suppress_on_low_confidence_input) {
-    if ((ngram_model_ != nullptr &&
-         ngram_model_->EvalConversation(annotated_conversation,
-                                        num_messages)) ||
-        regex_actions_->IsLowConfidenceInput(annotated_conversation,
+    if (regex_actions_->IsLowConfidenceInput(annotated_conversation,
                                              num_messages, &post_check_rules)) {
       response->output_filtered_low_confidence = true;
       return true;
@@ -1325,12 +1376,24 @@
     return false;
   }
 
+  // SuggestActionsFromModel also detects if the conversation is sensitive,
+  // either by using the old ngram model or the new model.
   // Suppress all predictions if the conversation was deemed sensitive.
-  if (preconditions_.suppress_on_sensitive_topic &&
-      response->output_filtered_sensitivity) {
+  if (preconditions_.suppress_on_sensitive_topic && response->is_sensitive) {
     return true;
   }
 
+  if (conversation_intent_detection_) {
+    // TODO(zbin): Ensure the deduplication/ranking logic in ranker.cc works.
+    auto actions = SuggestActionsFromConversationIntentDetection(
+        annotated_conversation, options, &response->actions);
+    if (!actions.ok()) {
+      TC3_LOG(ERROR) << "Could not run conversation intent detection: "
+                     << actions.error_message();
+      return false;
+    }
+  }
+
   if (!SuggestActionsFromLua(
           annotated_conversation, model_executor_.get(), interpreter.get(),
           annotator != nullptr ? annotator->entity_data_schema() : nullptr,
@@ -1415,4 +1478,16 @@
   return LoadAndVerifyModel(reinterpret_cast<const uint8_t*>(buffer), size);
 }
 
+bool ActionsSuggestions::InitializeConversationIntentDetection(
+    const std::string& serialized_config) {
+  auto conversation_intent_detection =
+      std::make_unique<ConversationIntentDetection>();
+  if (!conversation_intent_detection->Initialize(serialized_config).ok()) {
+    TC3_LOG(ERROR) << "Failed to initialize conversation intent detection.";
+    return false;
+  }
+  conversation_intent_detection_ = std::move(conversation_intent_detection);
+  return true;
+}
+
 }  // namespace libtextclassifier3
diff --git a/native/actions/actions-suggestions.h b/native/actions/actions-suggestions.h
index 04c8aa7..32edc78 100644
--- a/native/actions/actions-suggestions.h
+++ b/native/actions/actions-suggestions.h
@@ -25,11 +25,12 @@
 #include <vector>
 
 #include "actions/actions_model_generated.h"
+#include "actions/conversation_intent_detection/conversation-intent-detection.h"
 #include "actions/feature-processor.h"
 #include "actions/grammar-actions.h"
-#include "actions/ngram-model.h"
 #include "actions/ranker.h"
 #include "actions/regex-actions.h"
+#include "actions/sensitive-classifier-base.h"
 #include "actions/types.h"
 #include "annotator/annotator.h"
 #include "annotator/model-executor.h"
@@ -45,12 +46,6 @@
 
 namespace libtextclassifier3 {
 
-// Options for suggesting actions.
-struct ActionSuggestionOptions {
-  static ActionSuggestionOptions Default() { return ActionSuggestionOptions(); }
-  std::unordered_map<std::string, Variant> model_parameters;
-};
-
 // Class for predicting actions following a conversation.
 class ActionsSuggestions {
  public:
@@ -110,6 +105,9 @@
       const Conversation& conversation, const Annotator* annotator,
       const ActionSuggestionOptions& options = ActionSuggestionOptions()) const;
 
+  bool InitializeConversationIntentDetection(
+      const std::string& serialized_config);
+
   const ActionsModel* model() const;
   const reflection::Schema* entity_data_schema() const;
 
@@ -196,6 +194,10 @@
       ActionsSuggestionsResponse* response,
       std::unique_ptr<tflite::Interpreter>* interpreter) const;
 
+  Status SuggestActionsFromConversationIntentDetection(
+      const Conversation& conversation, const ActionSuggestionOptions& options,
+      std::vector<ActionSuggestion>* actions) const;
+
   // Creates options for annotation of a message.
   AnnotationOptions AnnotationOptionsForMessage(
       const ConversationMessage& message) const;
@@ -264,7 +266,11 @@
   const TriggeringPreconditions* triggering_preconditions_overlay_;
 
   // Low confidence input ngram classifier.
-  std::unique_ptr<const NGramModel> ngram_model_;
+  std::unique_ptr<const SensitiveTopicModelBase> sensitive_model_;
+
+  // Conversation intent detection model for additional actions.
+  std::unique_ptr<const ConversationIntentDetection>
+      conversation_intent_detection_;
 };
 
 // Interprets the buffer as a Model flatbuffer and returns it for reading.
diff --git a/native/actions/actions-suggestions_test.cc b/native/actions/actions-suggestions_test.cc
index ddaa604..7fe69fc 100644
--- a/native/actions/actions-suggestions_test.cc
+++ b/native/actions/actions-suggestions_test.cc
@@ -51,10 +51,18 @@
 constexpr char kModelFileName[] = "actions_suggestions_test.model";
 constexpr char kModelGrammarFileName[] =
     "actions_suggestions_grammar_test.model";
+constexpr char kMultiTaskTF2TestModelFileName[] =
+    "actions_suggestions_test.multi_task_tf2_test.model";
 constexpr char kMultiTaskModelFileName[] =
     "actions_suggestions_test.multi_task_9heads.model";
 constexpr char kHashGramModelFileName[] =
     "actions_suggestions_test.hashgram.model";
+constexpr char kMultiTaskSrP13nModelFileName[] =
+    "actions_suggestions_test.multi_task_sr_p13n.model";
+constexpr char kMultiTaskSrEmojiModelFileName[] =
+    "actions_suggestions_test.multi_task_sr_emoji.model";
+constexpr char kSensitiveTFliteModelFileName[] =
+    "actions_suggestions_test.sensitive_tflite.model";
 
 std::string ReadFile(const std::string& file_name) {
   std::ifstream file_stream(file_name);
@@ -79,6 +87,11 @@
     return ActionsSuggestions::FromPath(
         GetModelPath() + kMultiTaskModelFileName, unilib_.get());
   }
+
+  std::unique_ptr<ActionsSuggestions> LoadMultiTaskSrP13nTestModel() {
+    return ActionsSuggestions::FromPath(
+        GetModelPath() + kMultiTaskSrP13nModelFileName, unilib_.get());
+  }
   std::unique_ptr<UniLib> unilib_;
 };
 
@@ -795,6 +808,22 @@
   EXPECT_EQ(response.actions[0].score, 1.0);
 }
 
+TEST_F(ActionsSuggestionsTest, SuggestsActionsFromTF2MultiTaskModel) {
+  std::unique_ptr<ActionsSuggestions> actions_suggestions =
+      LoadTestModel(kMultiTaskTF2TestModelFileName);
+  const ActionsSuggestionsResponse response =
+      actions_suggestions->SuggestActions(
+          {{{/*user_id=*/1, "Hello how are you",
+             /*reference_time_ms_utc=*/0,
+             /*reference_timezone=*/"Europe/Zurich",
+             /*annotations=*/{},
+             /*locales=*/"en"}}});
+  EXPECT_EQ(response.actions.size(), 4);
+  EXPECT_EQ(response.actions[0].response_text, "Okay");
+  EXPECT_EQ(response.actions[0].type, "REPLY_SUGGESTION");
+  EXPECT_EQ(response.actions[3].type, "TEST_CLASSIFIER_INTENT");
+}
+
 TEST_F(ActionsSuggestionsTest, SuggestsActionsFromPhoneGrammarAnnotations) {
   std::unique_ptr<ActionsSuggestions> actions_suggestions =
       LoadTestModel(kModelGrammarFileName);
@@ -1685,6 +1714,36 @@
   EXPECT_EQ(response.actions.size(), 3 /*3 smart replies*/);
 }
 
+const int kUserProfileSize = 1000;
+constexpr char kUserProfileTokenIndex[] = "user_profile_token_index";
+constexpr char kUserProfileTokenWeight[] = "user_profile_token_weight";
+
+ActionSuggestionOptions GetOptionsForSmartReplyP13nModel() {
+  ActionSuggestionOptions options;
+  const std::vector<int> user_profile_token_indexes(kUserProfileSize, 1);
+  const std::vector<float> user_profile_token_weights(kUserProfileSize, 0.1f);
+  options.model_parameters.insert(
+      {kUserProfileTokenIndex,
+       libtextclassifier3::Variant(user_profile_token_indexes)});
+  options.model_parameters.insert(
+      {kUserProfileTokenWeight,
+       libtextclassifier3::Variant(user_profile_token_weights)});
+  return options;
+}
+
+TEST_F(ActionsSuggestionsTest, MultiTaskSuggestActionsSmartReplyP13n) {
+  std::unique_ptr<ActionsSuggestions> actions_suggestions =
+      LoadMultiTaskSrP13nTestModel();
+  const ActionSuggestionOptions options = GetOptionsForSmartReplyP13nModel();
+  const ActionsSuggestionsResponse response =
+      actions_suggestions->SuggestActions(
+          {{{/*user_id=*/1, "How are you?", /*reference_time_ms_utc=*/0,
+             /*reference_timezone=*/"Europe/Zurich",
+             /*annotations=*/{}, /*locales=*/"en"}}},
+          /*annotator=*/nullptr, options);
+  EXPECT_EQ(response.actions.size(), 3 /*3 smart replies*/);
+}
+
 TEST_F(ActionsSuggestionsTest,
        MultiTaskSuggestActionsDiversifiedSmartReplyAndLocation) {
   std::unique_ptr<ActionsSuggestions> actions_suggestions =
@@ -1734,5 +1793,37 @@
   EXPECT_EQ(response.actions.size(), 5 /*1 location share + 3 smart replies*/);
 }
 
+TEST_F(ActionsSuggestionsTest, SuggestsActionsFromMultiTaskSrEmojiModel) {
+  std::unique_ptr<ActionsSuggestions> actions_suggestions =
+      LoadTestModel(kMultiTaskSrEmojiModelFileName);
+  const ActionsSuggestionsResponse response =
+      actions_suggestions->SuggestActions(
+          {{{/*user_id=*/1, "hello?",
+             /*reference_time_ms_utc=*/0,
+             /*reference_timezone=*/"Europe/Zurich",
+             /*annotations=*/{},
+             /*locales=*/"en"}}});
+  EXPECT_EQ(response.actions.size(), 5);
+  EXPECT_EQ(response.actions[0].response_text, "😁");
+  EXPECT_EQ(response.actions[0].type, "EMOJI_CONCEPT");
+  EXPECT_EQ(response.actions[1].response_text, "Yes");
+  EXPECT_EQ(response.actions[1].type, "REPLY_SUGGESTION");
+}
+
+TEST_F(ActionsSuggestionsTest, SuggestsActionsFromSensitiveTfLiteModel) {
+  std::unique_ptr<ActionsSuggestions> actions_suggestions =
+      LoadTestModel(kSensitiveTFliteModelFileName);
+  const ActionsSuggestionsResponse response =
+      actions_suggestions->SuggestActions(
+          {{{/*user_id=*/1, "I want to kill myself",
+             /*reference_time_ms_utc=*/0,
+             /*reference_timezone=*/"Europe/Zurich",
+             /*annotations=*/{},
+             /*locales=*/"en"}}});
+  EXPECT_EQ(response.actions.size(), 0);
+  EXPECT_TRUE(response.is_sensitive);
+  EXPECT_FALSE(response.output_filtered_low_confidence);
+}
+
 }  // namespace
 }  // namespace libtextclassifier3
diff --git a/native/actions/actions_jni.cc b/native/actions/actions_jni.cc
index 1d5c2fb..9e15a2e 100644
--- a/native/actions/actions_jni.cc
+++ b/native/actions/actions_jni.cc
@@ -40,7 +40,6 @@
 
 using libtextclassifier3::ActionsSuggestions;
 using libtextclassifier3::ActionsSuggestionsResponse;
-using libtextclassifier3::ActionSuggestion;
 using libtextclassifier3::ActionSuggestionOptions;
 using libtextclassifier3::Annotator;
 using libtextclassifier3::Conversation;
@@ -122,63 +121,89 @@
   return options;
 }
 
-StatusOr<ScopedLocalRef<jobjectArray>> ActionSuggestionsToJObjectArray(
+StatusOr<ScopedLocalRef<jobject>> ActionSuggestionsToJObject(
     JNIEnv* env, const ActionsSuggestionsJniContext* context,
     jobject app_context,
     const reflection::Schema* annotations_entity_data_schema,
-    const std::vector<ActionSuggestion>& action_result,
+    const ActionsSuggestionsResponse& action_response,
     const Conversation& conversation, const jstring device_locales,
     const bool generate_intents) {
-  auto status_or_result_class = JniHelper::FindClass(
+  // Find the class ActionSuggestion.
+  auto status_or_action_class = JniHelper::FindClass(
       env, TC3_PACKAGE_PATH TC3_ACTIONS_CLASS_NAME_STR "$ActionSuggestion");
-  if (!status_or_result_class.ok()) {
+  if (!status_or_action_class.ok()) {
     TC3_LOG(ERROR) << "Couldn't find ActionSuggestion class.";
+    return status_or_action_class.status();
+  }
+  ScopedLocalRef<jclass> action_class =
+      std::move(status_or_action_class.ValueOrDie());
+
+  // Find the class ActionSuggestions
+  auto status_or_result_class = JniHelper::FindClass(
+      env, TC3_PACKAGE_PATH TC3_ACTIONS_CLASS_NAME_STR "$ActionSuggestions");
+  if (!status_or_result_class.ok()) {
+    TC3_LOG(ERROR) << "Couldn't find ActionSuggestions class.";
     return status_or_result_class.status();
   }
   ScopedLocalRef<jclass> result_class =
       std::move(status_or_result_class.ValueOrDie());
 
+  // Find the class Slot.
+  auto status_or_slot_class = JniHelper::FindClass(
+      env, TC3_PACKAGE_PATH TC3_ACTIONS_CLASS_NAME_STR "$Slot");
+  if (!status_or_slot_class.ok()) {
+    TC3_LOG(ERROR) << "Couldn't find Slot class.";
+    return status_or_slot_class.status();
+  }
+  ScopedLocalRef<jclass> slot_class =
+      std::move(status_or_slot_class.ValueOrDie());
+
   TC3_ASSIGN_OR_RETURN(
-      const jmethodID result_class_constructor,
+      const jmethodID action_class_constructor,
       JniHelper::GetMethodID(
-          env, result_class.get(), "<init>",
+          env, action_class.get(), "<init>",
           "(Ljava/lang/String;Ljava/lang/String;F[L" TC3_PACKAGE_PATH
               TC3_NAMED_VARIANT_CLASS_NAME_STR
           ";[B[L" TC3_PACKAGE_PATH TC3_REMOTE_ACTION_TEMPLATE_CLASS_NAME_STR
-          ";)V"));
-  TC3_ASSIGN_OR_RETURN(ScopedLocalRef<jobjectArray> results,
-                       JniHelper::NewObjectArray(env, action_result.size(),
-                                                 result_class.get(), nullptr));
-  for (int i = 0; i < action_result.size(); i++) {
+          ";[L" TC3_PACKAGE_PATH TC3_ACTIONS_CLASS_NAME_STR "$Slot;)V"));
+  TC3_ASSIGN_OR_RETURN(const jmethodID slot_class_constructor,
+                       JniHelper::GetMethodID(env, slot_class.get(), "<init>",
+                                              "(Ljava/lang/String;IIIF)V"));
+  TC3_ASSIGN_OR_RETURN(
+      ScopedLocalRef<jobjectArray> actions,
+      JniHelper::NewObjectArray(env, action_response.actions.size(),
+                                action_class.get(), nullptr));
+  for (int i = 0; i < action_response.actions.size(); i++) {
     ScopedLocalRef<jobjectArray> extras;
     const reflection::Schema* actions_entity_data_schema =
         context->model()->entity_data_schema();
     if (actions_entity_data_schema != nullptr &&
-        !action_result[i].serialized_entity_data.empty()) {
+        !action_response.actions[i].serialized_entity_data.empty()) {
       TC3_ASSIGN_OR_RETURN(
           extras, context->template_handler()->EntityDataAsNamedVariantArray(
                       actions_entity_data_schema,
-                      action_result[i].serialized_entity_data));
+                      action_response.actions[i].serialized_entity_data));
     }
 
     ScopedLocalRef<jbyteArray> serialized_entity_data;
-    if (!action_result[i].serialized_entity_data.empty()) {
+    if (!action_response.actions[i].serialized_entity_data.empty()) {
       TC3_ASSIGN_OR_RETURN(
           serialized_entity_data,
           JniHelper::NewByteArray(
-              env, action_result[i].serialized_entity_data.size()));
+              env, action_response.actions[i].serialized_entity_data.size()));
       TC3_RETURN_IF_ERROR(JniHelper::SetByteArrayRegion(
           env, serialized_entity_data.get(), 0,
-          action_result[i].serialized_entity_data.size(),
+          action_response.actions[i].serialized_entity_data.size(),
           reinterpret_cast<const jbyte*>(
-              action_result[i].serialized_entity_data.data())));
+              action_response.actions[i].serialized_entity_data.data())));
     }
 
     ScopedLocalRef<jobjectArray> remote_action_templates_result;
     if (generate_intents) {
       std::vector<RemoteActionTemplate> remote_action_templates;
       if (context->intent_generator()->GenerateIntents(
-              device_locales, action_result[i], conversation, app_context,
+              device_locales, action_response.actions[i], conversation,
+              app_context,
               /*annotations_entity_data_schema=*/annotations_entity_data_schema,
               /*actions_entity_data_schema=*/actions_entity_data_schema,
               &remote_action_templates)) {
@@ -191,23 +216,58 @@
 
     TC3_ASSIGN_OR_RETURN(ScopedLocalRef<jstring> reply,
                          context->jni_cache()->ConvertToJavaString(
-                             action_result[i].response_text));
+                             action_response.actions[i].response_text));
 
     TC3_ASSIGN_OR_RETURN(
         ScopedLocalRef<jstring> action_type,
-        JniHelper::NewStringUTF(env, action_result[i].type.c_str()));
+        JniHelper::NewStringUTF(env, action_response.actions[i].type.c_str()));
+
+    ScopedLocalRef<jobjectArray> slots;
+    if (!action_response.actions[i].slots.empty()) {
+      TC3_ASSIGN_OR_RETURN(slots,
+                           JniHelper::NewObjectArray(
+                               env, action_response.actions[i].slots.size(),
+                               slot_class.get(), nullptr));
+      for (int j = 0; j < action_response.actions[i].slots.size(); j++) {
+        const Slot& slot_c = action_response.actions[i].slots[j];
+        TC3_ASSIGN_OR_RETURN(ScopedLocalRef<jstring> slot_type,
+                             JniHelper::NewStringUTF(env, slot_c.type.c_str()));
+
+        TC3_ASSIGN_OR_RETURN(
+            ScopedLocalRef<jobject> slot,
+            JniHelper::NewObject(
+                env, slot_class.get(), slot_class_constructor, slot_type.get(),
+                slot_c.span.message_index, slot_c.span.span.first,
+                slot_c.span.span.second, slot_c.confidence_score));
+
+        TC3_RETURN_IF_ERROR(
+            JniHelper::SetObjectArrayElement(env, slots.get(), j, slot.get()));
+      }
+    }
 
     TC3_ASSIGN_OR_RETURN(
-        ScopedLocalRef<jobject> result,
-        JniHelper::NewObject(env, result_class.get(), result_class_constructor,
-                             reply.get(), action_type.get(),
-                             static_cast<jfloat>(action_result[i].score),
-                             extras.get(), serialized_entity_data.get(),
-                             remote_action_templates_result.get()));
+        ScopedLocalRef<jobject> action,
+        JniHelper::NewObject(
+            env, action_class.get(), action_class_constructor, reply.get(),
+            action_type.get(),
+            static_cast<jfloat>(action_response.actions[i].score), extras.get(),
+            serialized_entity_data.get(), remote_action_templates_result.get(),
+            slots.get()));
     TC3_RETURN_IF_ERROR(
-        JniHelper::SetObjectArrayElement(env, results.get(), i, result.get()));
+        JniHelper::SetObjectArrayElement(env, actions.get(), i, action.get()));
   }
-  return results;
+
+  // Create the ActionSuggestions object.
+  TC3_ASSIGN_OR_RETURN(
+      const jmethodID result_class_constructor,
+      JniHelper::GetMethodID(env, result_class.get(), "<init>",
+                             "([L" TC3_PACKAGE_PATH TC3_ACTIONS_CLASS_NAME_STR
+                             "$ActionSuggestion;Z)V"));
+  TC3_ASSIGN_OR_RETURN(
+      ScopedLocalRef<jobject> result,
+      JniHelper::NewObject(env, result_class.get(), result_class_constructor,
+                           actions.get(), action_response.is_sensitive));
+  return result;
 }
 
 StatusOr<ConversationMessage> FromJavaConversationMessage(JNIEnv* env,
@@ -354,7 +414,7 @@
 }  // namespace libtextclassifier3
 
 using libtextclassifier3::ActionsSuggestionsJniContext;
-using libtextclassifier3::ActionSuggestionsToJObjectArray;
+using libtextclassifier3::ActionSuggestionsToJObject;
 using libtextclassifier3::FromJavaActionSuggestionOptions;
 using libtextclassifier3::FromJavaConversation;
 using libtextclassifier3::JByteArrayToString;
@@ -435,7 +495,7 @@
 #endif  // TC3_UNILIB_JAVAICU
 }
 
-TC3_JNI_METHOD(jobjectArray, TC3_ACTIONS_CLASS_NAME, nativeSuggestActions)
+TC3_JNI_METHOD(jobject, TC3_ACTIONS_CLASS_NAME, nativeSuggestActions)
 (JNIEnv* env, jobject thiz, jlong ptr, jobject jconversation, jobject joptions,
  jlong annotatorPtr, jobject app_context, jstring device_locales,
  jboolean generate_intents) {
@@ -457,10 +517,10 @@
       annotator ? annotator->entity_data_schema() : nullptr;
 
   TC3_ASSIGN_OR_RETURN_NULL(
-      ScopedLocalRef<jobjectArray> result,
-      ActionSuggestionsToJObjectArray(
-          env, context, app_context, anntotations_entity_data_schema,
-          response.actions, conversation, device_locales, generate_intents));
+      ScopedLocalRef<jobject> result,
+      ActionSuggestionsToJObject(
+          env, context, app_context, anntotations_entity_data_schema, response,
+          conversation, device_locales, generate_intents));
   return result.release();
 }
 
@@ -533,3 +593,21 @@
   return reinterpret_cast<jlong>(
       reinterpret_cast<ActionsSuggestionsJniContext*>(ptr)->model());
 }
+
+TC3_JNI_METHOD(jboolean, TC3_ACTIONS_CLASS_NAME,
+               nativeInitializeConversationIntentDetection)
+(JNIEnv* env, jobject thiz, jlong ptr, jbyteArray jserialized_config) {
+  if (!ptr) {
+    return false;
+  }
+
+  ActionsSuggestions* model =
+      reinterpret_cast<ActionsSuggestionsJniContext*>(ptr)->model();
+
+  std::string serialized_config;
+  TC3_ASSIGN_OR_RETURN_0(
+      serialized_config, JByteArrayToString(env, jserialized_config),
+      TC3_LOG(ERROR) << "Could not convert serialized conversation intent "
+                        "detection config.");
+  return model->InitializeConversationIntentDetection(serialized_config);
+}
diff --git a/native/actions/actions_jni.h b/native/actions/actions_jni.h
index 5d6a79d..2d2d103 100644
--- a/native/actions/actions_jni.h
+++ b/native/actions/actions_jni.h
@@ -41,7 +41,11 @@
 (JNIEnv* env, jobject clazz, jint fd, jlong offset, jlong size,
  jbyteArray serialized_preconditions);
 
-TC3_JNI_METHOD(jobjectArray, TC3_ACTIONS_CLASS_NAME, nativeSuggestActions)
+TC3_JNI_METHOD(jboolean, TC3_ACTIONS_CLASS_NAME,
+               nativeInitializeConversationIntentDetection)
+(JNIEnv* env, jobject thiz, jlong ptr, jbyteArray jserialized_config);
+
+TC3_JNI_METHOD(jobject, TC3_ACTIONS_CLASS_NAME, nativeSuggestActions)
 (JNIEnv* env, jobject thiz, jlong ptr, jobject jconversation, jobject joptions,
  jlong annotatorPtr, jobject app_context, jstring device_locales,
  jboolean generate_intents);
diff --git a/native/actions/actions_model.fbs b/native/actions/actions_model.fbs
old mode 100755
new mode 100644
index 0db43f4..8c03eeb
--- a/native/actions/actions_model.fbs
+++ b/native/actions/actions_model.fbs
@@ -246,6 +246,17 @@
   tokenizer_options:ActionsTokenizerOptions;
 }
 
+// TFLite based sensitive topic classifier model.
+namespace libtextclassifier3;
+table TFLiteSensitiveClassifierConfig {
+  // Specification of the model.
+  model_spec:TensorflowLiteModelSpec;
+
+  // Triggering threshold, if a sensitive topic has a score higher than this
+  // value, it triggers the classifier.
+  threshold:float;
+}
+
 namespace libtextclassifier3;
 table TriggeringPreconditions {
   // Lower bound thresholds for the smart reply model prediction output.
@@ -554,6 +565,8 @@
 
   // Feature processor options.
   feature_processor_options:ActionsTokenFeatureProcessorOptions;
+
+  low_confidence_tflite_model:TFLiteSensitiveClassifierConfig;
 }
 
 root_type libtextclassifier3.ActionsModel;
diff --git a/native/actions/conversation_intent_detection/conversation-intent-detection-dummy.h b/native/actions/conversation_intent_detection/conversation-intent-detection-dummy.h
new file mode 100644
index 0000000..66255c5
--- /dev/null
+++ b/native/actions/conversation_intent_detection/conversation-intent-detection-dummy.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_ACTIONS_CONVERSATION_INTENT_DETECTION_CONVERSATION_INTENT_DETECTION_DUMMY_H_
+#define LIBTEXTCLASSIFIER_ACTIONS_CONVERSATION_INTENT_DETECTION_CONVERSATION_INTENT_DETECTION_DUMMY_H_
+
+#include <string>
+#include <vector>
+
+#include "actions/types.h"
+#include "utils/base/status.h"
+#include "utils/base/statusor.h"
+
+namespace libtextclassifier3 {
+
+// A dummy implementation of conversation intent detection.
+class ConversationIntentDetection {
+ public:
+  ConversationIntentDetection() {}
+
+  Status Initialize(const std::string& serialized_config) { return Status::OK; }
+
+  StatusOr<std::vector<ActionSuggestion>> SuggestActions(
+      const Conversation& conversation, const ActionSuggestionOptions& options =
+                                            ActionSuggestionOptions()) const {
+    return Status::OK;
+  }
+};
+
+}  // namespace libtextclassifier3
+
+#endif  // LIBTEXTCLASSIFIER_ACTIONS_CONVERSATION_INTENT_DETECTION_CONVERSATION_INTENT_DETECTION_DUMMY_H_
diff --git a/java/src/com/android/textclassifier/IModelDownloaderCallback.aidl b/native/actions/conversation_intent_detection/conversation-intent-detection.h
similarity index 61%
copy from java/src/com/android/textclassifier/IModelDownloaderCallback.aidl
copy to native/actions/conversation_intent_detection/conversation-intent-detection.h
index 7f9d7fb..949ceaf 100644
--- a/java/src/com/android/textclassifier/IModelDownloaderCallback.aidl
+++ b/native/actions/conversation_intent_detection/conversation-intent-detection.h
@@ -14,15 +14,9 @@
  * limitations under the License.
  */
 
-package com.android.textclassifier;
+#ifndef LIBTEXTCLASSIFIER_ACTIONS_CONVERSATION_INTENT_DETECTION_CONVERSATION_INTENT_DETECTION_H_
+#define LIBTEXTCLASSIFIER_ACTIONS_CONVERSATION_INTENT_DETECTION_CONVERSATION_INTENT_DETECTION_H_
 
-/**
- * Callback for download requests from ModelDownloaderImpl to
- * ModelDownloaderService.
- */
-oneway interface IModelDownloaderCallback {
+#include "actions/conversation_intent_detection/conversation-intent-detection-dummy.h"
 
-  void onSuccess(long bytesWritten);
-
-  void onFailure(String error);
-}
\ No newline at end of file
+#endif  // LIBTEXTCLASSIFIER_ACTIONS_CONVERSATION_INTENT_DETECTION_CONVERSATION_INTENT_DETECTION_H_
diff --git a/native/actions/grammar-actions.cc b/native/actions/grammar-actions.cc
index e925086..bf99edc 100644
--- a/native/actions/grammar-actions.cc
+++ b/native/actions/grammar-actions.cc
@@ -159,8 +159,8 @@
 
   for (const grammar::EvaluatedDerivation& evaluated_derivation :
        evaluated_derivations.ValueOrDie()) {
-    if (!InstantiateActionsFromMatch(text, message_index,
-                                     evaluated_derivation.derivation, result)) {
+    if (!InstantiateActionsFromMatch(text, message_index, evaluated_derivation,
+                                     result)) {
       TC3_LOG(ERROR) << "Could not instantiate actions from a grammar match.";
       return false;
     }
diff --git a/native/actions/ngram-model.cc b/native/actions/ngram-model.cc
index fb3992c..94ec8b2 100644
--- a/native/actions/ngram-model.cc
+++ b/native/actions/ngram-model.cc
@@ -60,7 +60,7 @@
 
 }  // anonymous namespace
 
-std::unique_ptr<NGramModel> NGramModel::Create(
+std::unique_ptr<NGramSensitiveModel> NGramSensitiveModel::Create(
     const UniLib* unilib, const NGramLinearRegressionModel* model,
     const Tokenizer* tokenizer) {
   if (model == nullptr) {
@@ -70,12 +70,13 @@
     TC3_LOG(ERROR) << "No tokenizer options specified.";
     return nullptr;
   }
-  return std::unique_ptr<NGramModel>(new NGramModel(unilib, model, tokenizer));
+  return std::unique_ptr<NGramSensitiveModel>(
+      new NGramSensitiveModel(unilib, model, tokenizer));
 }
 
-NGramModel::NGramModel(const UniLib* unilib,
-                       const NGramLinearRegressionModel* model,
-                       const Tokenizer* tokenizer)
+NGramSensitiveModel::NGramSensitiveModel(
+    const UniLib* unilib, const NGramLinearRegressionModel* model,
+    const Tokenizer* tokenizer)
     : model_(model) {
   // Create new tokenizer if options are specified, reuse feature processor
   // tokenizer otherwise.
@@ -88,9 +89,10 @@
 }
 
 // Returns whether a given n-gram matches the token stream.
-bool NGramModel::IsNGramMatch(const uint32* tokens, size_t num_tokens,
-                              const uint32* ngram_tokens,
-                              size_t num_ngram_tokens, int max_skips) const {
+bool NGramSensitiveModel::IsNGramMatch(const uint32* tokens, size_t num_tokens,
+                                       const uint32* ngram_tokens,
+                                       size_t num_ngram_tokens,
+                                       int max_skips) const {
   int token_idx = 0, ngram_token_idx = 0, skip_remain = 0;
   for (; token_idx < num_tokens && ngram_token_idx < num_ngram_tokens;) {
     if (tokens[token_idx] == ngram_tokens[ngram_token_idx]) {
@@ -112,8 +114,9 @@
 
 // Calculates the total number of skip-grams that can be created for a stream
 // with the given number of tokens.
-uint64 NGramModel::GetNumSkipGrams(int num_tokens, int max_ngram_length,
-                                   int max_skips) {
+uint64 NGramSensitiveModel::GetNumSkipGrams(int num_tokens,
+                                            int max_ngram_length,
+                                            int max_skips) {
   // Start with unigrams.
   uint64 total = num_tokens;
   for (int ngram_len = 2;
@@ -138,7 +141,8 @@
   return total;
 }
 
-std::pair<int, int> NGramModel::GetFirstTokenMatches(uint32 token_hash) const {
+std::pair<int, int> NGramSensitiveModel::GetFirstTokenMatches(
+    uint32 token_hash) const {
   const int num_ngrams = model_->ngram_weights()->size();
   const auto start_it = FirstTokenIterator(model_, 0);
   const auto end_it = FirstTokenIterator(model_, num_ngrams);
@@ -147,15 +151,13 @@
   return std::make_pair(start, end);
 }
 
-bool NGramModel::Eval(const UnicodeText& text, float* score) const {
+std::pair<bool, float> NGramSensitiveModel::Eval(
+    const UnicodeText& text) const {
   const std::vector<Token> raw_tokens = tokenizer_->Tokenize(text);
 
   // If we have no tokens, then just bail early.
   if (raw_tokens.empty()) {
-    if (score != nullptr) {
-      *score = model_->default_token_weight();
-    }
-    return false;
+    return std::make_pair(false, model_->default_token_weight());
   }
 
   // Hash the tokens.
@@ -201,25 +203,25 @@
   const float internal_score =
       (weight_matches + (model_->default_token_weight() * num_misses)) /
       num_candidates;
-  if (score != nullptr) {
-    *score = internal_score;
-  }
-  return internal_score > model_->threshold();
+  return std::make_pair(internal_score > model_->threshold(), internal_score);
 }
 
-bool NGramModel::EvalConversation(const Conversation& conversation,
-                                  const int num_messages) const {
+std::pair<bool, float> NGramSensitiveModel::EvalConversation(
+    const Conversation& conversation, const int num_messages) const {
+  float score = 0.0;
   for (int i = 1; i <= num_messages; i++) {
     const std::string& message =
         conversation.messages[conversation.messages.size() - i].text;
     const UnicodeText message_unicode(
         UTF8ToUnicodeText(message, /*do_copy=*/false));
     // Run ngram linear regression model.
-    if (Eval(message_unicode)) {
-      return true;
+    const auto prediction = Eval(message_unicode);
+    if (prediction.first) {
+      return prediction;
     }
+    score = std::max(score, prediction.second);
   }
-  return false;
+  return std::make_pair(false, score);
 }
 
 }  // namespace libtextclassifier3
diff --git a/native/actions/ngram-model.h b/native/actions/ngram-model.h
index a9072cd..32fd54b 100644
--- a/native/actions/ngram-model.h
+++ b/native/actions/ngram-model.h
@@ -20,6 +20,7 @@
 #include <memory>
 
 #include "actions/actions_model_generated.h"
+#include "actions/sensitive-classifier-base.h"
 #include "actions/types.h"
 #include "utils/tokenizer.h"
 #include "utils/utf8/unicodetext.h"
@@ -27,29 +28,30 @@
 
 namespace libtextclassifier3 {
 
-class NGramModel {
+class NGramSensitiveModel : public SensitiveTopicModelBase {
  public:
-  static std::unique_ptr<NGramModel> Create(
+  static std::unique_ptr<NGramSensitiveModel> Create(
       const UniLib* unilib, const NGramLinearRegressionModel* model,
       const Tokenizer* tokenizer);
 
   // Evaluates an n-gram linear regression model, and tests against the
   // threshold. Returns true in case of a positive classification. The caller
   // may also optionally query the score.
-  bool Eval(const UnicodeText& text, float* score = nullptr) const;
+  std::pair<bool, float> Eval(const UnicodeText& text) const override;
 
   // Evaluates an n-gram linear regression model against all messages in a
   // conversation and returns true in case of any positive classification.
-  bool EvalConversation(const Conversation& conversation,
-                        const int num_messages) const;
+  std::pair<bool, float> EvalConversation(const Conversation& conversation,
+                                          int num_messages) const override;
 
   // Exposed for testing only.
   static uint64 GetNumSkipGrams(int num_tokens, int max_ngram_length,
                                 int max_skips);
 
  private:
-  NGramModel(const UniLib* unilib, const NGramLinearRegressionModel* model,
-             const Tokenizer* tokenizer);
+  explicit NGramSensitiveModel(const UniLib* unilib,
+                               const NGramLinearRegressionModel* model,
+                               const Tokenizer* tokenizer);
 
   // Returns the (begin,end] range of n-grams where the first hashed token
   // matches the given value.
diff --git a/native/actions/sensitive-classifier-base.h b/native/actions/sensitive-classifier-base.h
new file mode 100644
index 0000000..b0ecacd
--- /dev/null
+++ b/native/actions/sensitive-classifier-base.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_ACTIONS_SENSITIVE_CLASSIFIER_BASE_H_
+#define LIBTEXTCLASSIFIER_ACTIONS_SENSITIVE_CLASSIFIER_BASE_H_
+
+#include <memory>
+#include <utility>
+
+#include "actions/types.h"
+#include "utils/utf8/unicodetext.h"
+
+namespace libtextclassifier3 {
+class SensitiveTopicModelBase {
+ public:
+  // Returns a pair: a boolean, which is  true if the topic is sensitive, and a
+  // score.
+  virtual std::pair<bool, float> Eval(const UnicodeText& text) const = 0;
+  virtual std::pair<bool, float> EvalConversation(
+      const Conversation& conversation, int num_messages) const = 0;
+
+  virtual ~SensitiveTopicModelBase() {}
+};
+}  // namespace libtextclassifier3
+
+#endif  // LIBTEXTCLASSIFIER_ACTIONS_SENSITIVE_CLASSIFIER_BASE_H_
diff --git a/native/actions/test_data/actions_suggestions_grammar_test.model b/native/actions/test_data/actions_suggestions_grammar_test.model
index 1af22aa..d122687 100644
--- a/native/actions/test_data/actions_suggestions_grammar_test.model
+++ b/native/actions/test_data/actions_suggestions_grammar_test.model
Binary files differ
diff --git a/native/actions/test_data/actions_suggestions_test.model b/native/actions/test_data/actions_suggestions_test.model
index 1361475..2d97bc8 100644
--- a/native/actions/test_data/actions_suggestions_test.model
+++ b/native/actions/test_data/actions_suggestions_test.model
Binary files differ
diff --git a/native/actions/test_data/actions_suggestions_test.multi_task_9heads.model b/native/actions/test_data/actions_suggestions_test.multi_task_9heads.model
index 1396a46..567828b 100644
--- a/native/actions/test_data/actions_suggestions_test.multi_task_9heads.model
+++ b/native/actions/test_data/actions_suggestions_test.multi_task_9heads.model
Binary files differ
diff --git a/native/actions/test_data/actions_suggestions_test.multi_task_sr_emoji.model b/native/actions/test_data/actions_suggestions_test.multi_task_sr_emoji.model
new file mode 100644
index 0000000..99f9040
--- /dev/null
+++ b/native/actions/test_data/actions_suggestions_test.multi_task_sr_emoji.model
Binary files differ
diff --git a/native/actions/test_data/actions_suggestions_test.multi_task_sr_nudge_signal_v0.model b/native/actions/test_data/actions_suggestions_test.multi_task_sr_nudge_signal_v0.model
index 660d97f..504d8e0 100644
--- a/native/actions/test_data/actions_suggestions_test.multi_task_sr_nudge_signal_v0.model
+++ b/native/actions/test_data/actions_suggestions_test.multi_task_sr_nudge_signal_v0.model
Binary files differ
diff --git a/native/actions/test_data/actions_suggestions_test.multi_task_sr_p13n.model b/native/actions/test_data/actions_suggestions_test.multi_task_sr_p13n.model
new file mode 100644
index 0000000..33926c2
--- /dev/null
+++ b/native/actions/test_data/actions_suggestions_test.multi_task_sr_p13n.model
Binary files differ
diff --git a/native/actions/test_data/actions_suggestions_test.multi_task_tf2_test.model b/native/actions/test_data/actions_suggestions_test.multi_task_tf2_test.model
new file mode 100644
index 0000000..730f603
--- /dev/null
+++ b/native/actions/test_data/actions_suggestions_test.multi_task_tf2_test.model
Binary files differ
diff --git a/native/actions/test_data/actions_suggestions_test.sensitive_tflite.model b/native/actions/test_data/actions_suggestions_test.sensitive_tflite.model
new file mode 100644
index 0000000..29fe077
--- /dev/null
+++ b/native/actions/test_data/actions_suggestions_test.sensitive_tflite.model
Binary files differ
diff --git a/native/actions/test_data/en_sensitive_topic_2019117.tflite b/native/actions/test_data/en_sensitive_topic_2019117.tflite
new file mode 100644
index 0000000..48edfbd
--- /dev/null
+++ b/native/actions/test_data/en_sensitive_topic_2019117.tflite
Binary files differ
diff --git a/native/actions/tflite-sensitive-model.cc b/native/actions/tflite-sensitive-model.cc
new file mode 100644
index 0000000..e68d1d5
--- /dev/null
+++ b/native/actions/tflite-sensitive-model.cc
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "actions/tflite-sensitive-model.h"
+
+#include <utility>
+
+#include "actions/actions_model_generated.h"
+#include "actions/types.h"
+
+namespace libtextclassifier3 {
+namespace {
+const char kNotSensitive[] = "NOT_SENSITIVE";
+}  // namespace
+
+std::unique_ptr<TFLiteSensitiveModel> TFLiteSensitiveModel::Create(
+    const TFLiteSensitiveClassifierConfig* model_config) {
+  auto result_model = std::unique_ptr<TFLiteSensitiveModel>(
+      new TFLiteSensitiveModel(model_config));
+  if (result_model->model_executor_ == nullptr) {
+    return nullptr;
+  }
+  return result_model;
+}
+
+std::pair<bool, float> TFLiteSensitiveModel::Eval(
+    const UnicodeText& text) const {
+  // Create a conversation with one message and classify it.
+  Conversation conversation;
+  conversation.messages.emplace_back();
+  conversation.messages.front().text = text.ToUTF8String();
+
+  return EvalConversation(conversation, 1);
+}
+
+std::pair<bool, float> TFLiteSensitiveModel::EvalConversation(
+    const Conversation& conversation, int num_messages) const {
+  if (model_executor_ == nullptr) {
+    return std::make_pair(false, 0.0f);
+  }
+  const auto interpreter = model_executor_->CreateInterpreter();
+
+  if (interpreter->AllocateTensors() != kTfLiteOk) {
+    // TODO(mgubin):  report error that tensors can't be allocated.
+    return std::make_pair(false, 0.0f);
+  }
+  // The sensitive model is actually an ordinary TFLite model with Lingua API,
+  // prepare texts and user_ids similar way, it doesn't use timediffs.
+  std::vector<std::string> context;
+  std::vector<int> user_ids;
+  context.reserve(num_messages);
+  user_ids.reserve(num_messages);
+
+  // Gather last `num_messages` messages from the conversation.
+  for (int i = conversation.messages.size() - num_messages;
+       i < conversation.messages.size(); i++) {
+    const ConversationMessage& message = conversation.messages[i];
+    context.push_back(message.text);
+    user_ids.push_back(message.user_id);
+  }
+
+  // Allocate tensors.
+  //
+
+  if (model_config_->model_spec()->input_context() >= 0) {
+    if (model_config_->model_spec()->input_length_to_pad() > 0) {
+      context.resize(model_config_->model_spec()->input_length_to_pad());
+    }
+    model_executor_->SetInput<std::string>(
+        model_config_->model_spec()->input_context(), context,
+        interpreter.get());
+  }
+  if (model_config_->model_spec()->input_context_length() >= 0) {
+    model_executor_->SetInput<int>(
+        model_config_->model_spec()->input_context_length(), context.size(),
+        interpreter.get());
+  }
+
+  // Num suggestions is always locked to 3.
+  if (model_config_->model_spec()->input_num_suggestions() > 0) {
+    model_executor_->SetInput<int>(
+        model_config_->model_spec()->input_num_suggestions(), 3,
+        interpreter.get());
+  }
+
+  if (interpreter->Invoke() != kTfLiteOk) {
+    // TODO(mgubin): Report a error about invoke.
+    return std::make_pair(false, 0.0f);
+  }
+
+  // Check that the prediction is not-sensitive.
+  const std::vector<tflite::StringRef> replies =
+      model_executor_->Output<tflite::StringRef>(
+          model_config_->model_spec()->output_replies(), interpreter.get());
+  const TensorView<float> scores = model_executor_->OutputView<float>(
+      model_config_->model_spec()->output_replies_scores(), interpreter.get());
+  for (int i = 0; i < replies.size(); ++i) {
+    const auto reply = replies[i];
+    if (reply.len != sizeof(kNotSensitive) - 1 &&
+        0 != memcmp(reply.str, kNotSensitive, sizeof(kNotSensitive))) {
+      const auto score = scores.data()[i];
+      if (score >= model_config_->threshold()) {
+        return std::make_pair(true, score);
+      }
+    }
+  }
+  return std::make_pair(false, 1.0);
+}
+
+TFLiteSensitiveModel::TFLiteSensitiveModel(
+    const TFLiteSensitiveClassifierConfig* model_config)
+    : model_config_(model_config),
+      model_executor_(TfLiteModelExecutor::FromBuffer(
+          model_config->model_spec()->tflite_model())) {}
+}  // namespace libtextclassifier3
diff --git a/native/actions/tflite-sensitive-model.h b/native/actions/tflite-sensitive-model.h
new file mode 100644
index 0000000..2f161a8
--- /dev/null
+++ b/native/actions/tflite-sensitive-model.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_ACTIONS_TFLITE_SENSITIVE_MODEL_H_
+#define LIBTEXTCLASSIFIER_ACTIONS_TFLITE_SENSITIVE_MODEL_H_
+
+#include <memory>
+
+#include "actions/actions_model_generated.h"
+#include "actions/sensitive-classifier-base.h"
+#include "utils/tflite-model-executor.h"
+
+namespace libtextclassifier3 {
+class TFLiteSensitiveModel : public SensitiveTopicModelBase {
+ public:
+  // The object keeps but doesn't own model_config.
+  static std::unique_ptr<TFLiteSensitiveModel> Create(
+      const TFLiteSensitiveClassifierConfig* model_config);
+
+  std::pair<bool, float> Eval(const UnicodeText& text) const override;
+  std::pair<bool, float> EvalConversation(const Conversation& conversation,
+                                          int num_messages) const override;
+
+ private:
+  explicit TFLiteSensitiveModel(
+      const TFLiteSensitiveClassifierConfig* model_config);
+  const TFLiteSensitiveClassifierConfig* model_config_ = nullptr;  // not owned.
+  std::unique_ptr<const TfLiteModelExecutor> model_executor_;
+};
+}  // namespace libtextclassifier3
+
+#endif  // LIBTEXTCLASSIFIER_ACTIONS_TFLITE_SENSITIVE_MODEL_H_
diff --git a/native/actions/types.h b/native/actions/types.h
index 8862262..c400bb2 100644
--- a/native/actions/types.h
+++ b/native/actions/types.h
@@ -19,6 +19,7 @@
 
 #include <map>
 #include <string>
+#include <unordered_map>
 #include <vector>
 
 #include "actions/actions-entity-data_generated.h"
@@ -55,6 +56,13 @@
   std::string name;
 };
 
+// A slot associated with an action {
+struct Slot {
+  std::string type;
+  MessageTextSpan span;
+  float confidence_score;
+};
+
 // Action suggestion that contains a response text and the type of the response.
 struct ActionSuggestion {
   // Text of the action suggestion.
@@ -75,12 +83,21 @@
   // Extras information.
   std::string serialized_entity_data;
 
+  // Slots corresponding to the action suggestion.
+  std::vector<Slot> slots;
+
   const ActionsEntityData* entity_data() const {
     return LoadAndVerifyFlatbuffer<ActionsEntityData>(
         serialized_entity_data.data(), serialized_entity_data.size());
   }
 };
 
+// Options for suggesting actions.
+struct ActionSuggestionOptions {
+  static ActionSuggestionOptions Default() { return ActionSuggestionOptions(); }
+  std::unordered_map<std::string, Variant> model_parameters;
+};
+
 // Actions suggestions result containing meta - information and the suggested
 // actions.
 struct ActionsSuggestionsResponse {
@@ -88,8 +105,8 @@
   float sensitivity_score = -1.f;
   float triggering_score = -1.f;
 
-  // Whether the output was suppressed by the sensitivity threshold.
-  bool output_filtered_sensitivity = false;
+  // Whether the input conversation is considered as sensitive.
+  bool is_sensitive = false;
 
   // Whether the output was suppressed by the triggering score threshold.
   bool output_filtered_min_triggering_score = false;
diff --git a/native/annotator/annotator.cc b/native/annotator/annotator.cc
index 53176e6..e296a64 100644
--- a/native/annotator/annotator.cc
+++ b/native/annotator/annotator.cc
@@ -27,6 +27,7 @@
 #include <vector>
 
 #include "annotator/collections.h"
+#include "annotator/datetime/grammar-parser.h"
 #include "annotator/datetime/regex-parser.h"
 #include "annotator/flatbuffer-utils.h"
 #include "annotator/knowledge/knowledge-engine-types.h"
@@ -37,6 +38,7 @@
 #include "utils/base/statusor.h"
 #include "utils/calendar/calendar.h"
 #include "utils/checksum.h"
+#include "utils/grammar/analyzer.h"
 #include "utils/i18n/locale-list.h"
 #include "utils/i18n/locale.h"
 #include "utils/math/softmax.h"
@@ -416,7 +418,17 @@
     }
   }
 
-  if (model_->datetime_model()) {
+  if (model_->datetime_grammar_model()) {
+    if (model_->datetime_grammar_model()->rules()) {
+      analyzer_ = std::make_unique<grammar::Analyzer>(
+          unilib_, model_->datetime_grammar_model()->rules());
+      datetime_grounder_ = std::make_unique<DatetimeGrounder>(calendarlib_);
+      datetime_parser_ = std::make_unique<GrammarDatetimeParser>(
+          *analyzer_, *datetime_grounder_,
+          /*target_classification_score=*/1.0,
+          /*priority_score=*/1.0);
+    }
+  } else if (model_->datetime_model()) {
     datetime_parser_ = RegexDatetimeParser::Instance(
         model_->datetime_model(), unilib_, calendarlib_, decompressor.get());
     if (!datetime_parser_) {
@@ -686,24 +698,6 @@
   return false;
 }
 
-namespace {
-
-int CountDigits(const std::string& str,
-                const CodepointSpan& selection_indices) {
-  int count = 0;
-  int i = 0;
-  const UnicodeText unicode_str = UTF8ToUnicodeText(str, /*do_copy=*/false);
-  for (auto it = unicode_str.begin(); it != unicode_str.end(); ++it, ++i) {
-    if (i >= selection_indices.first && i < selection_indices.second &&
-        IsDigit(*it)) {
-      ++count;
-    }
-  }
-  return count;
-}
-
-}  // namespace
-
 namespace internal {
 // Helper function, which if the initial 'span' contains only white-spaces,
 // moves the selection to a single-codepoint selection on a left or right side
@@ -909,9 +903,11 @@
     return original_click_indices;
   }
   if (knowledge_engine_ != nullptr &&
-      !knowledge_engine_->Chunk(context, options.annotation_usecase,
-                                options.location_context, Permissions(),
-                                AnnotateMode::kEntityAnnotation, &candidates)) {
+      !knowledge_engine_
+           ->Chunk(context, options.annotation_usecase,
+                   options.location_context, Permissions(),
+                   AnnotateMode::kEntityAnnotation, &candidates)
+           .ok()) {
     TC3_LOG(ERROR) << "Knowledge suggest selection failed.";
     return original_click_indices;
   }
@@ -1002,12 +998,13 @@
       if (candidates.annotated_spans[0][i].classification.empty() &&
           model_->selection_options()->always_classify_suggested_selection() &&
           !filtered_collections_selection_.empty()) {
-        if (!ModelClassifyText(
-                context, detected_text_language_tags,
-                candidates.annotated_spans[0][i].span, options,
-                &interpreter_manager,
-                /*embedding_cache=*/nullptr,
-                &candidates.annotated_spans[0][i].classification)) {
+        if (!ModelClassifyText(context, /*cached_tokens=*/{},
+                               detected_text_language_tags,
+                               candidates.annotated_spans[0][i].span, options,
+                               &interpreter_manager,
+                               /*embedding_cache=*/nullptr,
+                               &candidates.annotated_spans[0][i].classification,
+                               /*tokens=*/nullptr)) {
           return original_click_indices;
         }
       }
@@ -1158,7 +1155,8 @@
     std::vector<ClassificationResult> classification;
     if (!ModelClassifyText(context, cached_tokens, detected_text_language_tags,
                            candidates[i].span, options, interpreter_manager,
-                           /*embedding_cache=*/nullptr, &classification)) {
+                           /*embedding_cache=*/nullptr, &classification,
+                           /*tokens=*/nullptr)) {
       return false;
     }
 
@@ -1260,8 +1258,10 @@
 
   int click_pos;
   *tokens = selection_feature_processor_->Tokenize(context_unicode);
+  const auto [click_begin, click_end] =
+      CodepointSpanToUnicodeTextRange(context_unicode, click_indices);
   selection_feature_processor_->RetokenizeAndFindClick(
-      context_unicode, click_indices,
+      context_unicode, click_begin, click_end, click_indices,
       selection_feature_processor_->GetOptions()->only_use_line_with_click(),
       tokens, &click_pos);
   if (click_pos == kInvalidIndex) {
@@ -1352,18 +1352,6 @@
   return true;
 }
 
-bool Annotator::ModelClassifyText(
-    const std::string& context,
-    const std::vector<Locale>& detected_text_language_tags,
-    const CodepointSpan& selection_indices, const BaseOptions& options,
-    InterpreterManager* interpreter_manager,
-    FeatureProcessor::EmbeddingCache* embedding_cache,
-    std::vector<ClassificationResult>* classification_results) const {
-  return ModelClassifyText(context, {}, detected_text_language_tags,
-                           selection_indices, options, interpreter_manager,
-                           embedding_cache, classification_results);
-}
-
 namespace internal {
 std::vector<Token> CopyCachedTokens(const std::vector<Token>& cached_tokens,
                                     const CodepointSpan& selection_indices,
@@ -1433,16 +1421,24 @@
     const CodepointSpan& selection_indices, const BaseOptions& options,
     InterpreterManager* interpreter_manager,
     FeatureProcessor::EmbeddingCache* embedding_cache,
-    std::vector<ClassificationResult>* classification_results) const {
-  std::vector<Token> tokens;
-  return ModelClassifyText(context, cached_tokens, detected_text_language_tags,
-                           selection_indices, options, interpreter_manager,
-                           embedding_cache, classification_results, &tokens);
+    std::vector<ClassificationResult>* classification_results,
+    std::vector<Token>* tokens) const {
+  const UnicodeText context_unicode =
+      UTF8ToUnicodeText(context, /*do_copy=*/false);
+  const auto [span_begin, span_end] =
+      CodepointSpanToUnicodeTextRange(context_unicode, selection_indices);
+  return ModelClassifyText(context_unicode, cached_tokens,
+                           detected_text_language_tags, span_begin, span_end,
+                           /*line=*/nullptr, selection_indices, options,
+                           interpreter_manager, embedding_cache,
+                           classification_results, tokens);
 }
 
 bool Annotator::ModelClassifyText(
-    const std::string& context, const std::vector<Token>& cached_tokens,
+    const UnicodeText& context_unicode, const std::vector<Token>& cached_tokens,
     const std::vector<Locale>& detected_text_language_tags,
+    const UnicodeText::const_iterator& span_begin,
+    const UnicodeText::const_iterator& span_end, const UnicodeTextRange* line,
     const CodepointSpan& selection_indices, const BaseOptions& options,
     InterpreterManager* interpreter_manager,
     FeatureProcessor::EmbeddingCache* embedding_cache,
@@ -1460,8 +1456,13 @@
     return true;
   }
 
+  std::vector<Token> local_tokens;
+  if (tokens == nullptr) {
+    tokens = &local_tokens;
+  }
+
   if (cached_tokens.empty()) {
-    *tokens = classification_feature_processor_->Tokenize(context);
+    *tokens = classification_feature_processor_->Tokenize(context_unicode);
   } else {
     *tokens = internal::CopyCachedTokens(cached_tokens, selection_indices,
                                          ClassifyTextUpperBoundNeededTokens());
@@ -1469,7 +1470,7 @@
 
   int click_pos;
   classification_feature_processor_->RetokenizeAndFindClick(
-      context, selection_indices,
+      context_unicode, span_begin, span_end, selection_indices,
       classification_feature_processor_->GetOptions()
           ->only_use_line_with_click(),
       tokens, &click_pos);
@@ -1572,7 +1573,7 @@
 
   // Sanity checks.
   if (top_collection == Collections::Phone()) {
-    const int digit_count = CountDigits(context, selection_indices);
+    const int digit_count = std::count_if(span_begin, span_end, IsDigit);
     if (digit_count <
             model_->classification_options()->phone_min_num_digits() ||
         digit_count >
@@ -1772,9 +1773,11 @@
   // TODO(b/126579108): Propagate error status.
   ClassificationResult knowledge_result;
   if (knowledge_engine_ &&
-      knowledge_engine_->ClassifyText(
-          context, selection_indices, options.annotation_usecase,
-          options.location_context, Permissions(), &knowledge_result)) {
+      knowledge_engine_
+          ->ClassifyText(context, selection_indices, options.annotation_usecase,
+                         options.location_context, Permissions(),
+                         &knowledge_result)
+          .ok()) {
     candidates.push_back({selection_indices, {knowledge_result}});
     candidates.back().source = AnnotatedSpan::Source::KNOWLEDGE;
   }
@@ -2054,14 +2057,28 @@
       // Skip empty spans.
       if (codepoint_span.first != codepoint_span.second) {
         std::vector<ClassificationResult> classification;
-        if (!ModelClassifyText(line_str, line_tokens,
-                               detected_text_language_tags, codepoint_span,
-                               options, interpreter_manager, &embedding_cache,
-                               &classification)) {
-          TC3_LOG(ERROR) << "Could not classify text: "
-                         << (codepoint_span.first + offset) << " "
-                         << (codepoint_span.second + offset);
-          return false;
+        if (options.enable_optimization) {
+          if (!ModelClassifyText(
+                  line_unicode, line_tokens, detected_text_language_tags,
+                  /*span_begin=*/line_codepoints[codepoint_span.first],
+                  /*span_end=*/line_codepoints[codepoint_span.second], &line,
+                  codepoint_span, options, interpreter_manager,
+                  &embedding_cache, &classification, /*tokens=*/nullptr)) {
+            TC3_LOG(ERROR) << "Could not classify text: "
+                           << (codepoint_span.first + offset) << " "
+                           << (codepoint_span.second + offset);
+            return false;
+          }
+        } else {
+          if (!ModelClassifyText(line_str, line_tokens,
+                                 detected_text_language_tags, codepoint_span,
+                                 options, interpreter_manager, &embedding_cache,
+                                 &classification, /*tokens=*/nullptr)) {
+            TC3_LOG(ERROR) << "Could not classify text: "
+                           << (codepoint_span.first + offset) << " "
+                           << (codepoint_span.second + offset);
+            return false;
+          }
         }
 
         // Do not include the span if it's classified as "other".
@@ -2722,14 +2739,19 @@
     std::string quantity;
     GetMoneyQuantityFromCapturingGroup(match, config, context_unicode,
                                        &quantity, &quantity_exponent);
-    if ((quantity_exponent > 0 && quantity_exponent < 9) ||
-        (quantity_exponent == 9 && data->money->amount_whole_part <= 2)) {
-      data->money->amount_whole_part =
+    if (quantity_exponent > 0 && quantity_exponent <= 9) {
+      const double amount_whole_part =
           data->money->amount_whole_part * pow(10, quantity_exponent) +
           data->money->nanos / pow(10, 9 - quantity_exponent);
-      data->money->nanos = data->money->nanos %
-                           static_cast<int>(pow(10, 9 - quantity_exponent)) *
-                           pow(10, quantity_exponent);
+      // TODO(jacekj): Change type of `data->money->amount_whole_part` to int64
+      // (and `std::numeric_limits<int>::max()` to
+      // `std::numeric_limits<int64>::max()`).
+      if (amount_whole_part < std::numeric_limits<int>::max()) {
+        data->money->amount_whole_part = amount_whole_part;
+        data->money->nanos = data->money->nanos %
+                             static_cast<int>(pow(10, 9 - quantity_exponent)) *
+                             pow(10, quantity_exponent);
+      }
     }
     if (quantity_exponent > 0) {
       data->money->unnormalized_amount = strings::JoinStrings(
@@ -3164,10 +3186,13 @@
   return LoadAndVerifyModel(buffer, size);
 }
 
-bool Annotator::LookUpKnowledgeEntity(
-    const std::string& id, std::string* serialized_knowledge_result) const {
-  return knowledge_engine_ &&
-         knowledge_engine_->LookUpEntity(id, serialized_knowledge_result);
+StatusOr<std::string> Annotator::LookUpKnowledgeEntity(
+    const std::string& id) const {
+  if (!knowledge_engine_) {
+    return Status(StatusCode::FAILED_PRECONDITION,
+                  "knowledge_engine_ is nullptr");
+  }
+  return knowledge_engine_->LookUpEntity(id);
 }
 
 StatusOr<std::string> Annotator::LookUpKnowledgeEntityProperty(
diff --git a/native/annotator/annotator.h b/native/annotator/annotator.h
index a570a83..d69fe32 100644
--- a/native/annotator/annotator.h
+++ b/native/annotator/annotator.h
@@ -26,6 +26,7 @@
 #include <vector>
 
 #include "annotator/contact/contact-engine.h"
+#include "annotator/datetime/datetime-grounder.h"
 #include "annotator/datetime/parser.h"
 #include "annotator/duration/duration.h"
 #include "annotator/experimental/experimental.h"
@@ -50,6 +51,7 @@
 #include "utils/flatbuffers/mutable.h"
 #include "utils/i18n/locale.h"
 #include "utils/memory/mmap.h"
+#include "utils/utf8/unicodetext.h"
 #include "utils/utf8/unilib.h"
 #include "utils/zlib/zlib.h"
 #include "lang_id/lang-id.h"
@@ -214,10 +216,9 @@
       const std::string& context,
       const AnnotationOptions& options = AnnotationOptions()) const;
 
-  // Looks up a knowledge entity by its id. If successful, populates the
-  // serialized knowledge result and returns true.
-  bool LookUpKnowledgeEntity(const std::string& id,
-                             std::string* serialized_knowledge_result) const;
+  // Looks up a knowledge entity by its id. Returns the serialized knowledge
+  // result.
+  StatusOr<std::string> LookUpKnowledgeEntity(const std::string& id) const;
 
   // Looks up an entity's property.
   StatusOr<std::string> LookUpKnowledgeEntityProperty(
@@ -292,6 +293,10 @@
 
   // Classifies the selected text given the context string with the
   // classification model.
+  // The following arguments are optional:
+  //   - cached_tokens - can be given as empty
+  //   - embedding_cache - can be given as nullptr
+  //   - tokens - can be given as nullptr
   // Returns true if no error occurred.
   bool ModelClassifyText(
       const std::string& context, const std::vector<Token>& cached_tokens,
@@ -302,23 +307,23 @@
       std::vector<ClassificationResult>* classification_results,
       std::vector<Token>* tokens) const;
 
-  // Same as above but doesn't output tokens.
+  // Same as above, but (for optimization) takes the context as UnicodeText and
+  // takes the following extra arguments:
+  //   - span_begin, span_end - iterators in context_unicode corresponding to
+  //     selection_indices
+  //   - line - a UnicodeTextRange within context_unicode corresponding to the
+  //     line containing the selection - optional, can be given as nullptr
   bool ModelClassifyText(
-      const std::string& context, const std::vector<Token>& cached_tokens,
+      const UnicodeText& context_unicode,
+      const std::vector<Token>& cached_tokens,
       const std::vector<Locale>& detected_text_language_tags,
+      const UnicodeText::const_iterator& span_begin,
+      const UnicodeText::const_iterator& span_end, const UnicodeTextRange* line,
       const CodepointSpan& selection_indices, const BaseOptions& options,
       InterpreterManager* interpreter_manager,
       FeatureProcessor::EmbeddingCache* embedding_cache,
-      std::vector<ClassificationResult>* classification_results) const;
-
-  // Same as above but doesn't take cached tokens and doesn't output tokens.
-  bool ModelClassifyText(
-      const std::string& context,
-      const std::vector<Locale>& detected_text_language_tags,
-      const CodepointSpan& selection_indices, const BaseOptions& options,
-      InterpreterManager* interpreter_manager,
-      FeatureProcessor::EmbeddingCache* embedding_cache,
-      std::vector<ClassificationResult>* classification_results) const;
+      std::vector<ClassificationResult>* classification_results,
+      std::vector<Token>* tokens) const;
 
   // Returns a relative token span that represents how many tokens on the left
   // from the selection and right from the selection are needed for the
@@ -444,6 +449,8 @@
   std::unique_ptr<const FeatureProcessor> selection_feature_processor_;
   std::unique_ptr<const FeatureProcessor> classification_feature_processor_;
 
+  std::unique_ptr<const grammar::Analyzer> analyzer_;
+  std::unique_ptr<const DatetimeGrounder> datetime_grounder_;
   std::unique_ptr<const DatetimeParser> datetime_parser_;
   std::unique_ptr<const GrammarAnnotator> grammar_annotator_;
 
diff --git a/native/annotator/annotator_jni.cc b/native/annotator/annotator_jni.cc
index 7f095f9..6e7eeab 100644
--- a/native/annotator/annotator_jni.cc
+++ b/native/annotator/annotator_jni.cc
@@ -876,10 +876,12 @@
   const Annotator* model = reinterpret_cast<AnnotatorJniContext*>(ptr)->model();
   TC3_ASSIGN_OR_RETURN_NULL(const std::string id_utf8,
                             JStringToUtf8String(env, id));
-  std::string serialized_knowledge_result;
-  if (!model->LookUpKnowledgeEntity(id_utf8, &serialized_knowledge_result)) {
+  auto serialized_knowledge_result_so = model->LookUpKnowledgeEntity(id_utf8);
+  if (!serialized_knowledge_result_so.ok()) {
     return nullptr;
   }
+  std::string serialized_knowledge_result =
+      serialized_knowledge_result_so.ValueOrDie();
 
   TC3_ASSIGN_OR_RETURN_NULL(
       ScopedLocalRef<jbyteArray> result,
diff --git a/native/annotator/annotator_test-include.cc b/native/annotator/annotator_test-include.cc
index b852827..3ecc201 100644
--- a/native/annotator/annotator_test-include.cc
+++ b/native/annotator/annotator_test-include.cc
@@ -49,6 +49,14 @@
   return GetModelPath() + "test_vocab_model.fb";
 }
 
+std::string GetTestModelWithDatetimeRegEx() {
+  std::string model_buffer = ReadFile(GetTestModelPath());
+  model_buffer = ModifyAnnotatorModel(model_buffer, [](ModelT* model) {
+    model->datetime_grammar_model.reset(nullptr);
+  });
+  return model_buffer;
+}
+
 void ExpectFirstEntityIsMoney(const std::vector<AnnotatedSpan>& result,
                               const std::string& currency,
                               const std::string& amount, const int whole_part,
@@ -1326,12 +1334,12 @@
   VerifyAnnotatesDurationsInRawMode(classifier.get());
 }
 
-TEST_F(AnnotatorTest, DurationAndRelativeTimeCanOverlapInRawMode) {
-  std::unique_ptr<Annotator> classifier = Annotator::FromPath(
-      GetTestModelPath(), unilib_.get(), calendarlib_.get());
+void VerifyDurationAndRelativeTimeCanOverlapInRawMode(
+    const Annotator* classifier) {
   ASSERT_TRUE(classifier);
   AnnotationOptions options;
   options.annotation_usecase = AnnotationUsecase_ANNOTATION_USECASE_RAW;
+  options.locales = "en";
 
   const std::vector<AnnotatedSpan> annotations =
       classifier->Annotate("let's meet in 3 hours", options);
@@ -1340,9 +1348,24 @@
               Contains(IsDatetimeSpan(/*start=*/11, /*end=*/21,
                                       /*time_ms_utc=*/10800000L,
                                       DatetimeGranularity::GRANULARITY_HOUR)));
-  EXPECT_THAT(annotations, Contains(IsDurationSpan(
-                               /*start=*/14, /*end=*/21,
-                               /*duration_ms=*/3 * 60 * 60 * 1000)));
+  EXPECT_THAT(annotations,
+              Contains(IsDurationSpan(/*start=*/14, /*end=*/21,
+                                      /*duration_ms=*/3 * 60 * 60 * 1000)));
+}
+
+TEST_F(AnnotatorTest, DurationAndRelativeTimeCanOverlapInRawMode) {
+  std::unique_ptr<Annotator> classifier = Annotator::FromPath(
+      GetTestModelPath(), unilib_.get(), calendarlib_.get());
+  VerifyDurationAndRelativeTimeCanOverlapInRawMode(classifier.get());
+}
+
+TEST_F(AnnotatorTest,
+       DurationAndRelativeTimeCanOverlapInRawModeWithDatetimeRegEx) {
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
+  VerifyDurationAndRelativeTimeCanOverlapInRawMode(classifier.get());
 }
 
 TEST_F(AnnotatorTest, AnnotateSplitLines) {
@@ -1610,6 +1633,7 @@
   EXPECT_TRUE(classifier);
   ClassificationOptions options;
   options.reference_timezone = "Europe/Zurich";
+  options.locales = "en";
 
   std::vector<ClassificationResult> result =
       classifier->ClassifyText("january 1, 2017", {0, 15}, options);
@@ -1625,10 +1649,19 @@
   VerifyClassifyTextDateInZurichTimezone(classifier.get());
 }
 
+TEST_F(AnnotatorTest, ClassifyTextDateInZurichTimezoneWithDatetimeRegEx) {
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
+  VerifyClassifyTextDateInZurichTimezone(classifier.get());
+}
+
 void VerifyClassifyTextDateInLATimezone(const Annotator* classifier) {
   EXPECT_TRUE(classifier);
   ClassificationOptions options;
   options.reference_timezone = "America/Los_Angeles";
+  options.locales = "en";
 
   std::vector<ClassificationResult> result =
       classifier->ClassifyText("march 1, 2017", {0, 13}, options);
@@ -1638,6 +1671,14 @@
                                        DatetimeGranularity::GRANULARITY_DAY)));
 }
 
+TEST_F(AnnotatorTest, ClassifyTextDateInLATimezoneWithDatetimeRegEx) {
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
+  VerifyClassifyTextDateInLATimezone(classifier.get());
+}
+
 TEST_F(AnnotatorTest, ClassifyTextDateInLATimezone) {
   std::unique_ptr<Annotator> classifier = Annotator::FromPath(
       GetTestModelPath(), unilib_.get(), calendarlib_.get());
@@ -1648,6 +1689,7 @@
   EXPECT_TRUE(classifier);
   ClassificationOptions options;
   options.reference_timezone = "Europe/Zurich";
+  options.locales = "en";
 
   std::vector<ClassificationResult> result = classifier->ClassifyText(
       "hello world this is the first line\n"
@@ -1659,6 +1701,14 @@
                                        DatetimeGranularity::GRANULARITY_DAY)));
 }
 
+TEST_F(AnnotatorTest, ClassifyTextDateOnAotherLineWithDatetimeRegEx) {
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
+  VerifyClassifyTextDateOnAotherLine(classifier.get());
+}
+
 TEST_F(AnnotatorTest, ClassifyTextDateOnAotherLine) {
   std::unique_ptr<Annotator> classifier = Annotator::FromPath(
       GetTestModelPath(), unilib_.get(), calendarlib_.get());
@@ -1687,9 +1737,20 @@
   VerifyClassifyTextWhenLocaleUSParsesDateAsMonthDay(classifier.get());
 }
 
+TEST_F(AnnotatorTest,
+       ClassifyTextWhenLocaleUSParsesDateAsMonthDayWithDatetimeRegEx) {
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
+  VerifyClassifyTextWhenLocaleUSParsesDateAsMonthDay(classifier.get());
+}
+
 TEST_F(AnnotatorTest, ClassifyTextWhenLocaleGermanyParsesDateAsMonthDay) {
-  std::unique_ptr<Annotator> classifier = Annotator::FromPath(
-      GetTestModelPath(), unilib_.get(), calendarlib_.get());
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
   EXPECT_TRUE(classifier);
   std::vector<ClassificationResult> result;
   ClassificationOptions options;
@@ -1741,7 +1802,7 @@
 }
 
 TEST_F(AnnotatorTest, SuggestTextDateDisabled) {
-  const std::string test_model = ReadFile(GetTestModelPath());
+  std::string test_model = GetTestModelWithDatetimeRegEx();
   std::unique_ptr<ModelT> unpacked_model = UnPackModel(test_model.c_str());
 
   // Disable the patterns for selection.
@@ -2090,14 +2151,23 @@
                                    std::string(50000, ' ');
     const int value_length = type_value_pair.second.size();
 
-    EXPECT_THAT(classifier->Annotate(input_100k),
+    AnnotationOptions annotation_options;
+    annotation_options.locales = "en";
+    EXPECT_THAT(classifier->Annotate(input_100k, annotation_options),
                 ElementsAreArray({IsAnnotatedSpan(50000, 50000 + value_length,
                                                   type_value_pair.first)}));
-    EXPECT_EQ(classifier->SuggestSelection(input_100k, {50000, 50001}),
+    SelectionOptions selection_options;
+    selection_options.locales = "en";
+    EXPECT_EQ(classifier->SuggestSelection(input_100k, {50000, 50001},
+                                           selection_options),
               CodepointSpan(50000, 50000 + value_length));
+
+    ClassificationOptions classification_options;
+    classification_options.locales = "en";
     EXPECT_EQ(type_value_pair.first,
               FirstResult(classifier->ClassifyText(
-                  input_100k, {50000, 50000 + value_length})));
+                  input_100k, {50000, 50000 + value_length},
+                  classification_options)));
   }
 }
 
@@ -2107,6 +2177,14 @@
   VerifyLongInput(classifier.get());
 }
 
+TEST_F(AnnotatorTest, LongInputWithRegExDatetime) {
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
+  VerifyLongInput(classifier.get());
+}
+
 // These coarse tests are there only to make sure the execution happens in
 // reasonable amount of time.
 TEST_F(AnnotatorTest, LongInputNoResultCheck) {
@@ -2484,6 +2562,7 @@
   EXPECT_TRUE(classifier);
   std::vector<ClassificationResult> result;
   ClassificationOptions options;
+  options.locales = "en-US";
 
   result = classifier->ClassifyText("03.05.1970 00:00am", {0, 18}, options);
 
@@ -2553,11 +2632,20 @@
   VerifyClassifyTextOutputsDatetimeEntityData(classifier.get());
 }
 
+TEST_F(AnnotatorTest, ClassifyTextOutputsDatetimeEntityDataWithDatetimeRegEx) {
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
+  VerifyClassifyTextOutputsDatetimeEntityData(classifier.get());
+}
+
 void VerifyAnnotateOutputsDatetimeEntityData(const Annotator* classifier) {
   EXPECT_TRUE(classifier);
   std::vector<AnnotatedSpan> result;
   AnnotationOptions options;
   options.is_serialized_entity_data_enabled = true;
+  options.locales = "en";
 
   result = classifier->Annotate("September 1, 2019", options);
 
@@ -2605,7 +2693,19 @@
   VerifyAnnotateOutputsDatetimeEntityData(classifier.get());
 }
 
+TEST_F(AnnotatorTest, AnnotateOutputsDatetimeEntityDataWithDatetimeRegEx) {
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
+  VerifyAnnotateOutputsDatetimeEntityData(classifier.get());
+}
+
 TEST_F(AnnotatorTest, AnnotateOutputsMoneyEntityData) {
+  // std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  // std::unique_ptr<Annotator> classifier =
+  //     Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+  //                                  unilib_.get(), calendarlib_.get());
   std::unique_ptr<Annotator> classifier = Annotator::FromPath(
       GetTestModelPath(), unilib_.get(), calendarlib_.get());
   EXPECT_TRUE(classifier);
@@ -2696,8 +2796,8 @@
                            /*amount=*/"3,000.00", /*whole_part=*/3000,
                            /*decimal_part=*/0, /*nanos=*/0);
 
-  ExpectFirstEntityIsMoney(classifier->Annotate("1.2 CHF", options), "CHF",
-                           /*amount=*/"1.2", /*whole_part=*/1,
+  ExpectFirstEntityIsMoney(classifier->Annotate("1.2 CHF", options), "CHF",
+                           /*amount=*/"1.2", /*whole_part=*/1,
                            /*decimal_part=*/2, /*nanos=*/200000000);
   ExpectFirstEntityIsMoney(classifier->Annotate("CHF1.2", options), "CHF",
                            /*amount=*/"1.2", /*whole_part=*/1,
@@ -2757,9 +2857,10 @@
       {.text = "...was born on 13/12/1989."},
   };
 
+  AnnotationOptions annotation_options;
+  annotation_options.locales = "en";
   StatusOr<Annotations> annotations_status =
-      classifier->AnnotateStructuredInput(string_fragments,
-                                          AnnotationOptions());
+      classifier->AnnotateStructuredInput(string_fragments, annotation_options);
   ASSERT_TRUE(annotations_status.ok());
   Annotations annotations = annotations_status.ValueOrDie();
   ASSERT_EQ(annotations.annotated_spans.size(), 2);
@@ -2769,11 +2870,10 @@
               ElementsAreArray({IsAnnotatedSpan(15, 25, "date")}));
 }
 
-TEST_F(AnnotatorTest, InputFragmentTimestampOverridesAnnotationOptions) {
-  std::unique_ptr<Annotator> classifier = Annotator::FromPath(
-      GetTestModelPath(), unilib_.get(), calendarlib_.get());
-
+void VerifyInputFragmentTimestampOverridesAnnotationOptions(
+    const Annotator* classifier) {
   AnnotationOptions annotation_options;
+  annotation_options.locales = "en";
   annotation_options.reference_time_ms_utc =
       1554465190000;                             // 04/05/2019 11:53 am
   int64 fragment_reference_time = 946727580000;  // 01/01/2000 11:53 am
@@ -2799,9 +2899,23 @@
                   DatetimeGranularity::GRANULARITY_MINUTE)}));
 }
 
-TEST_F(AnnotatorTest, InputFragmentTimezoneOverridesAnnotationOptions) {
+TEST_F(AnnotatorTest,
+       InputFragmentTimestampOverridesAnnotationOptionsWithDatetimeRegEx) {
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
+  VerifyInputFragmentTimestampOverridesAnnotationOptions(classifier.get());
+}
+
+TEST_F(AnnotatorTest, InputFragmentTimestampOverridesAnnotationOptions) {
   std::unique_ptr<Annotator> classifier = Annotator::FromPath(
       GetTestModelPath(), unilib_.get(), calendarlib_.get());
+  VerifyInputFragmentTimestampOverridesAnnotationOptions(classifier.get());
+}
+
+void VerifyInputFragmentTimezoneOverridesAnnotationOptions(
+    const Annotator* classifier) {
   std::vector<InputFragment> string_fragments = {
       {.text = "11/12/2020 17:20"},
       {
@@ -2809,9 +2923,10 @@
           .datetime_options = Optional<DatetimeOptions>(
               {.reference_timezone = "Europe/Zurich"}),
       }};
+  AnnotationOptions annotation_options;
+  annotation_options.locales = "en-US";
   StatusOr<Annotations> annotations_status =
-      classifier->AnnotateStructuredInput(string_fragments,
-                                          AnnotationOptions());
+      classifier->AnnotateStructuredInput(string_fragments, annotation_options);
   ASSERT_TRUE(annotations_status.ok());
   Annotations annotations = annotations_status.ValueOrDie();
   ASSERT_EQ(annotations.annotated_spans.size(), 2);
@@ -2825,6 +2940,21 @@
                   DatetimeGranularity::GRANULARITY_MINUTE)}));
 }
 
+TEST_F(AnnotatorTest, InputFragmentTimezoneOverridesAnnotationOptions) {
+  std::unique_ptr<Annotator> classifier = Annotator::FromPath(
+      GetTestModelPath(), unilib_.get(), calendarlib_.get());
+  VerifyInputFragmentTimezoneOverridesAnnotationOptions(classifier.get());
+}
+
+TEST_F(AnnotatorTest,
+       InputFragmentTimezoneOverridesAnnotationOptionsWithDatetimeRegEx) {
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
+  VerifyInputFragmentTimezoneOverridesAnnotationOptions(classifier.get());
+}
+
 namespace {
 void AddDummyRegexDatetimeModel(ModelT* unpacked_model) {
   unpacked_model->datetime_model.reset(new DatetimeModelT);
@@ -2909,9 +3039,8 @@
               IsEmpty());
 }
 
-TEST_F(AnnotatorTest, AnnotateSupportsPointwiseCollectionFilteringInRawMode) {
-  std::unique_ptr<Annotator> classifier = Annotator::FromPath(
-      GetTestModelPath(), unilib_.get(), calendarlib_.get());
+void VerifyAnnotateSupportsPointwiseCollectionFilteringInRawMode(
+    const Annotator* classifier) {
   ASSERT_TRUE(classifier);
   struct Example {
     std::string collection;
@@ -2942,6 +3071,7 @@
 
   for (const Example& example : examples) {
     AnnotationOptions options;
+    options.locales = "en";
     options.annotation_usecase = AnnotationUsecase_ANNOTATION_USECASE_RAW;
     options.entity_types.insert(example.collection);
 
@@ -2952,9 +3082,23 @@
   }
 }
 
+TEST_F(AnnotatorTest, AnnotateSupportsPointwiseCollectionFilteringInRawMode) {
+  std::unique_ptr<Annotator> classifier = Annotator::FromPath(
+      GetTestModelPath(), unilib_.get(), calendarlib_.get());
+  VerifyAnnotateSupportsPointwiseCollectionFilteringInRawMode(classifier.get());
+}
+
+TEST_F(AnnotatorTest,
+       AnnotateSupportsPointwiseCollectionFilteringInRawModeWithDatetimeRegEx) {
+  std::string model_buffer = GetTestModelWithDatetimeRegEx();
+  std::unique_ptr<Annotator> classifier =
+      Annotator::FromUnownedBuffer(model_buffer.data(), model_buffer.size(),
+                                   unilib_.get(), calendarlib_.get());
+  VerifyAnnotateSupportsPointwiseCollectionFilteringInRawMode(classifier.get());
+}
+
 TEST_F(AnnotatorTest, InitializeFromString) {
   const std::string test_model = ReadFile(GetTestModelPath());
-
   std::unique_ptr<Annotator> classifier =
       Annotator::FromString(test_model, unilib_.get(), calendarlib_.get());
   ASSERT_TRUE(classifier);
diff --git a/native/annotator/datetime/datetime-grounder.cc b/native/annotator/datetime/datetime-grounder.cc
new file mode 100644
index 0000000..7d5f440
--- /dev/null
+++ b/native/annotator/datetime/datetime-grounder.cc
@@ -0,0 +1,273 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "annotator/datetime/datetime-grounder.h"
+
+#include <limits>
+#include <unordered_map>
+#include <vector>
+
+#include "annotator/datetime/datetime_generated.h"
+#include "annotator/datetime/utils.h"
+#include "annotator/types.h"
+#include "utils/base/integral_types.h"
+#include "utils/base/status.h"
+#include "utils/base/status_macros.h"
+
+using ::libtextclassifier3::grammar::datetime::AbsoluteDateTime;
+using ::libtextclassifier3::grammar::datetime::ComponentType;
+using ::libtextclassifier3::grammar::datetime::Meridiem;
+using ::libtextclassifier3::grammar::datetime::RelativeDateTime;
+using ::libtextclassifier3::grammar::datetime::RelativeDatetimeComponent;
+using ::libtextclassifier3::grammar::datetime::UngroundedDatetime;
+using ::libtextclassifier3::grammar::datetime::RelativeDatetimeComponent_::
+    Modifier;
+
+namespace libtextclassifier3 {
+
+namespace {
+
+const std::unordered_map<int, int> kMonthDefaultLastDayMap(
+    {{/*no_month*/ 0, 31},
+     {/*January*/ 1, 31},
+     {/*February*/ 2, 29},
+     {/*March*/ 3, 31},
+     {/*April*/ 4, 30},
+     {/*May*/ 5, 31},
+     {/*June*/ 6, 30},
+     {/*July*/ 7, 31},
+     {/*August*/ 8, 31},
+     {/*September*/ 9, 30},
+     {/*October*/ 10, 31},
+     {/*November*/ 11, 30},
+     {/*December*/ 12, 31}});
+
+bool IsValidDatetime(const AbsoluteDateTime* absolute_datetime) {
+  // Sanity Checks.
+  if (absolute_datetime->minute() > 59 || absolute_datetime->second() > 59 ||
+      absolute_datetime->hour() > 23 || absolute_datetime->month() > 12 ||
+      absolute_datetime->month() == 0) {
+    return false;
+  }
+  if (absolute_datetime->day() >= 0) {
+    int min_day_value = 1;
+    int max_day_value = 31;
+    if (absolute_datetime->month() >= 0 && absolute_datetime->month() <= 12) {
+      max_day_value = kMonthDefaultLastDayMap.at(absolute_datetime->month());
+      if (absolute_datetime->day() < min_day_value ||
+          absolute_datetime->day() > max_day_value) {
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
+bool IsValidDatetime(const RelativeDateTime* relative_datetime) {
+  if (relative_datetime->base()) {
+    return IsValidDatetime(relative_datetime->base());
+  }
+  return true;
+}
+
+StatusOr<DatetimeComponent::RelativeQualifier> ToRelativeQualifier(
+    const Modifier& modifier) {
+  switch (modifier) {
+    case Modifier::Modifier_THIS:
+      return DatetimeComponent::RelativeQualifier::THIS;
+    case Modifier::Modifier_LAST:
+      return DatetimeComponent::RelativeQualifier::LAST;
+    case Modifier::Modifier_NEXT:
+      return DatetimeComponent::RelativeQualifier::NEXT;
+    case Modifier::Modifier_NOW:
+      return DatetimeComponent::RelativeQualifier::NOW;
+    case Modifier::Modifier_TOMORROW:
+      return DatetimeComponent::RelativeQualifier::TOMORROW;
+    case Modifier::Modifier_YESTERDAY:
+      return DatetimeComponent::RelativeQualifier::YESTERDAY;
+    case Modifier::Modifier_PAST:
+      return DatetimeComponent::RelativeQualifier::PAST;
+    case Modifier::Modifier_FUTURE:
+      return DatetimeComponent::RelativeQualifier::FUTURE;
+    case Modifier::Modifier_UNSPECIFIED:
+      return DatetimeComponent::RelativeQualifier::UNSPECIFIED;
+    default:
+      return Status(StatusCode::INTERNAL,
+                    "Couldn't parse the Modifier to RelativeQualifier.");
+  }
+}
+
+StatusOr<DatetimeComponent::ComponentType> ToComponentType(
+    const grammar::datetime::ComponentType component_type) {
+  switch (component_type) {
+    case grammar::datetime::ComponentType_YEAR:
+      return DatetimeComponent::ComponentType::YEAR;
+    case grammar::datetime::ComponentType_MONTH:
+      return DatetimeComponent::ComponentType::MONTH;
+    case grammar::datetime::ComponentType_WEEK:
+      return DatetimeComponent::ComponentType::WEEK;
+    case grammar::datetime::ComponentType_DAY_OF_WEEK:
+      return DatetimeComponent::ComponentType::DAY_OF_WEEK;
+    case grammar::datetime::ComponentType_DAY_OF_MONTH:
+      return DatetimeComponent::ComponentType::DAY_OF_MONTH;
+    case grammar::datetime::ComponentType_HOUR:
+      return DatetimeComponent::ComponentType::HOUR;
+    case grammar::datetime::ComponentType_MINUTE:
+      return DatetimeComponent::ComponentType::MINUTE;
+    case grammar::datetime::ComponentType_SECOND:
+      return DatetimeComponent::ComponentType::SECOND;
+    case grammar::datetime::ComponentType_MERIDIEM:
+      return DatetimeComponent::ComponentType::MERIDIEM;
+    case grammar::datetime::ComponentType_UNSPECIFIED:
+      return DatetimeComponent::ComponentType::UNSPECIFIED;
+    default:
+      return Status(StatusCode::INTERNAL,
+                    "Couldn't parse the DatetimeComponent's ComponentType from "
+                    "grammar's datetime ComponentType.");
+  }
+}
+
+void FillAbsoluteDateTimeComponents(
+    const grammar::datetime::AbsoluteDateTime* absolute_datetime,
+    DatetimeParsedData* datetime_parsed_data) {
+  if (absolute_datetime->year() >= 0) {
+    datetime_parsed_data->SetAbsoluteValue(
+        DatetimeComponent::ComponentType::YEAR,
+        GetAdjustedYear(absolute_datetime->year()));
+  }
+  if (absolute_datetime->month() >= 0) {
+    datetime_parsed_data->SetAbsoluteValue(
+        DatetimeComponent::ComponentType::MONTH, absolute_datetime->month());
+  }
+  if (absolute_datetime->day() >= 0) {
+    datetime_parsed_data->SetAbsoluteValue(
+        DatetimeComponent::ComponentType::DAY_OF_MONTH,
+        absolute_datetime->day());
+  }
+  if (absolute_datetime->week_day() >= 0) {
+    datetime_parsed_data->SetAbsoluteValue(
+        DatetimeComponent::ComponentType::DAY_OF_WEEK,
+        absolute_datetime->week_day());
+  }
+  if (absolute_datetime->hour() >= 0) {
+    datetime_parsed_data->SetAbsoluteValue(
+        DatetimeComponent::ComponentType::HOUR, absolute_datetime->hour());
+  }
+  if (absolute_datetime->minute() >= 0) {
+    datetime_parsed_data->SetAbsoluteValue(
+        DatetimeComponent::ComponentType::MINUTE, absolute_datetime->minute());
+  }
+  if (absolute_datetime->second() >= 0) {
+    datetime_parsed_data->SetAbsoluteValue(
+        DatetimeComponent::ComponentType::SECOND, absolute_datetime->second());
+  }
+  if (absolute_datetime->meridiem() != grammar::datetime::Meridiem_UNKNOWN) {
+    datetime_parsed_data->SetAbsoluteValue(
+        DatetimeComponent::ComponentType::MERIDIEM,
+        absolute_datetime->meridiem() == grammar::datetime::Meridiem_AM ? 0
+                                                                        : 1);
+  }
+  if (absolute_datetime->time_zone()) {
+    datetime_parsed_data->SetAbsoluteValue(
+        DatetimeComponent::ComponentType::ZONE_OFFSET,
+        absolute_datetime->time_zone()->utc_offset_mins());
+  }
+}
+
+StatusOr<DatetimeParsedData> FillRelativeDateTimeComponents(
+    const grammar::datetime::RelativeDateTime* relative_datetime) {
+  DatetimeParsedData datetime_parsed_data;
+  for (const RelativeDatetimeComponent* relative_component :
+       *relative_datetime->relative_datetime_component()) {
+    TC3_ASSIGN_OR_RETURN(const DatetimeComponent::ComponentType component_type,
+                         ToComponentType(relative_component->component_type()));
+    datetime_parsed_data.SetRelativeCount(component_type,
+                                          relative_component->value());
+    TC3_ASSIGN_OR_RETURN(
+        const DatetimeComponent::RelativeQualifier relative_qualifier,
+        ToRelativeQualifier(relative_component->modifier()));
+    datetime_parsed_data.SetRelativeValue(component_type, relative_qualifier);
+  }
+  if (relative_datetime->base()) {
+    FillAbsoluteDateTimeComponents(relative_datetime->base(),
+                                   &datetime_parsed_data);
+  }
+  return datetime_parsed_data;
+}
+
+}  // namespace
+
+DatetimeGrounder::DatetimeGrounder(const CalendarLib* calendarlib)
+    : calendarlib_(*calendarlib) {}
+
+StatusOr<std::vector<DatetimeParseResult>> DatetimeGrounder::Ground(
+    const int64 reference_time_ms_utc, const std::string& reference_timezone,
+    const std::string& reference_locale,
+    const grammar::datetime::UngroundedDatetime* ungrounded_datetime) const {
+  DatetimeParsedData datetime_parsed_data;
+  if (ungrounded_datetime->absolute_datetime()) {
+    FillAbsoluteDateTimeComponents(ungrounded_datetime->absolute_datetime(),
+                                   &datetime_parsed_data);
+  } else if (ungrounded_datetime->relative_datetime()) {
+    TC3_ASSIGN_OR_RETURN(datetime_parsed_data,
+                         FillRelativeDateTimeComponents(
+                             ungrounded_datetime->relative_datetime()));
+  }
+  std::vector<DatetimeParsedData> interpretations;
+  FillInterpretations(datetime_parsed_data,
+                      calendarlib_.GetGranularity(datetime_parsed_data),
+                      &interpretations);
+  std::vector<DatetimeParseResult> datetime_parse_result;
+
+  for (const DatetimeParsedData& interpretation : interpretations) {
+    std::vector<DatetimeComponent> date_components;
+    interpretation.GetDatetimeComponents(&date_components);
+    DatetimeParseResult result;
+    // Text classifier only provides ambiguity limited to “AM/PM” which is
+    // encoded in the pair of DatetimeParseResult; both corresponding to the
+    // same date, but one corresponding to “AM” and the other one corresponding
+    //  to “PM”.
+    if (!calendarlib_.InterpretParseData(
+            interpretation, reference_time_ms_utc, reference_timezone,
+            reference_locale, /*prefer_future_for_unspecified_date=*/true,
+            &(result.time_ms_utc), &(result.granularity))) {
+      return Status(
+          StatusCode::INTERNAL,
+          "Couldn't parse the UngroundedDatetime to DatetimeParseResult.");
+    }
+
+    // Sort the date time units by component type.
+    std::sort(date_components.begin(), date_components.end(),
+              [](DatetimeComponent a, DatetimeComponent b) {
+                return a.component_type > b.component_type;
+              });
+    result.datetime_components.swap(date_components);
+    datetime_parse_result.push_back(result);
+  }
+  return datetime_parse_result;
+}
+
+bool DatetimeGrounder::IsValidUngroundedDatetime(
+    const UngroundedDatetime* ungrounded_datetime) const {
+  if (ungrounded_datetime->absolute_datetime()) {
+    return IsValidDatetime(ungrounded_datetime->absolute_datetime());
+  } else if (ungrounded_datetime->relative_datetime()) {
+    return IsValidDatetime(ungrounded_datetime->relative_datetime());
+  }
+  return false;
+}
+
+}  // namespace libtextclassifier3
diff --git a/native/annotator/datetime/datetime-grounder.h b/native/annotator/datetime/datetime-grounder.h
new file mode 100644
index 0000000..6a6f5e4
--- /dev/null
+++ b/native/annotator/datetime/datetime-grounder.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_ANNOTATOR_DATETIME_DATETIME_GROUNDER_H_
+#define LIBTEXTCLASSIFIER_ANNOTATOR_DATETIME_DATETIME_GROUNDER_H_
+
+#include <vector>
+
+#include "annotator/datetime/datetime_generated.h"
+#include "annotator/types.h"
+#include "utils/base/statusor.h"
+#include "utils/calendar/calendar.h"
+
+namespace libtextclassifier3 {
+
+// Utility class to resolve and complete an ungrounded datetime specification.
+class DatetimeGrounder {
+ public:
+  explicit DatetimeGrounder(const CalendarLib* calendarlib);
+
+  // Resolves ambiguities and produces concrete datetime results from an
+  // ungrounded datetime specification.
+  StatusOr<std::vector<DatetimeParseResult>> Ground(
+      const int64 reference_time_ms_utc, const std::string& reference_timezone,
+      const std::string& reference_locale,
+      const grammar::datetime::UngroundedDatetime* ungrounded_datetime) const;
+
+  bool IsValidUngroundedDatetime(
+      const grammar::datetime::UngroundedDatetime* ungrounded_datetime) const;
+
+ private:
+  const CalendarLib& calendarlib_;
+};
+
+}  // namespace libtextclassifier3
+
+#endif  // LIBTEXTCLASSIFIER_ANNOTATOR_DATETIME_DATETIME_GROUNDER_H_
diff --git a/native/annotator/datetime/datetime-grounder_test.cc b/native/annotator/datetime/datetime-grounder_test.cc
new file mode 100644
index 0000000..121aae8
--- /dev/null
+++ b/native/annotator/datetime/datetime-grounder_test.cc
@@ -0,0 +1,292 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "annotator/datetime/datetime-grounder.h"
+
+#include "annotator/datetime/datetime_generated.h"
+#include "utils/flatbuffers/flatbuffers.h"
+#include "utils/jvm-test-utils.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+
+using ::libtextclassifier3::grammar::datetime::AbsoluteDateTimeT;
+using ::libtextclassifier3::grammar::datetime::ComponentType;
+using ::libtextclassifier3::grammar::datetime::Meridiem;
+using ::libtextclassifier3::grammar::datetime::RelativeDatetimeComponentT;
+using ::libtextclassifier3::grammar::datetime::RelativeDateTimeT;
+using ::libtextclassifier3::grammar::datetime::TimeZoneT;
+using ::libtextclassifier3::grammar::datetime::UngroundedDatetime;
+using ::libtextclassifier3::grammar::datetime::UngroundedDatetimeT;
+using ::libtextclassifier3::grammar::datetime::RelativeDatetimeComponent_::
+    Modifier;
+using ::testing::SizeIs;
+
+namespace libtextclassifier3 {
+
+class DatetimeGrounderTest : public testing::Test {
+ public:
+  void SetUp() override {
+    calendarlib_ = CreateCalendarLibForTesting();
+    datetime_grounder_.reset(new DatetimeGrounder(calendarlib_.get()));
+  }
+
+ protected:
+  OwnedFlatbuffer<UngroundedDatetime, std::string> BuildAbsoluteDatetime(
+      const int year, const int month, const int day, const int hour,
+      const int minute, const int second, const Meridiem meridiem) {
+    grammar::datetime::UngroundedDatetimeT ungrounded_datetime;
+    ungrounded_datetime.absolute_datetime.reset(new AbsoluteDateTimeT);
+
+    // Set absolute datetime value.
+    ungrounded_datetime.absolute_datetime->year = year;
+    ungrounded_datetime.absolute_datetime->month = month;
+    ungrounded_datetime.absolute_datetime->day = day;
+    ungrounded_datetime.absolute_datetime->hour = hour;
+    ungrounded_datetime.absolute_datetime->minute = minute;
+    ungrounded_datetime.absolute_datetime->second = second;
+    ungrounded_datetime.absolute_datetime->meridiem = meridiem;
+
+    return OwnedFlatbuffer<UngroundedDatetime, std::string>(
+        PackFlatbuffer<UngroundedDatetime>(&ungrounded_datetime));
+  }
+
+  OwnedFlatbuffer<UngroundedDatetime, std::string> BuildRelativeDatetime(
+      const ComponentType component_type, const Modifier modifier,
+      const int relative_count) {
+    UngroundedDatetimeT ungrounded_datetime;
+    ungrounded_datetime.relative_datetime.reset(new RelativeDateTimeT);
+    ungrounded_datetime.relative_datetime->relative_datetime_component
+        .emplace_back(new RelativeDatetimeComponentT);
+    ungrounded_datetime.relative_datetime->relative_datetime_component.back()
+        ->modifier = modifier;
+    ungrounded_datetime.relative_datetime->relative_datetime_component.back()
+        ->component_type = component_type;
+    ungrounded_datetime.relative_datetime->relative_datetime_component.back()
+        ->value = relative_count;
+    ungrounded_datetime.relative_datetime->base.reset(new AbsoluteDateTimeT);
+    ungrounded_datetime.relative_datetime->base->year = 2020;
+    ungrounded_datetime.relative_datetime->base->month = 6;
+    ungrounded_datetime.relative_datetime->base->day = 30;
+
+    return OwnedFlatbuffer<UngroundedDatetime, std::string>(
+        PackFlatbuffer<UngroundedDatetime>(&ungrounded_datetime));
+  }
+
+  void VerifyValidUngroundedDatetime(
+      const UngroundedDatetime* ungrounded_datetime) {
+    EXPECT_TRUE(
+        datetime_grounder_->IsValidUngroundedDatetime(ungrounded_datetime));
+  }
+
+  void VerifyInValidUngroundedDatetime(
+      const UngroundedDatetime* ungrounded_datetime) {
+    EXPECT_FALSE(
+        datetime_grounder_->IsValidUngroundedDatetime(ungrounded_datetime));
+  }
+
+  std::unique_ptr<DatetimeGrounder> datetime_grounder_;
+  std::unique_ptr<CalendarLib> calendarlib_;
+};
+
+TEST_F(DatetimeGrounderTest, AbsoluteDatetimeTest) {
+  const OwnedFlatbuffer<UngroundedDatetime, std::string> datetime =
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/03, /*day=*/30,
+                            /*hour=*/11, /*minute=*/59, /*second=*/59,
+                            grammar::datetime::Meridiem_AM);
+  const std::vector<DatetimeParseResult> data =
+      datetime_grounder_
+          ->Ground(
+              /*reference_time_ms_utc=*/0, "Europe/Zurich", "en-US",
+              datetime.get())
+          .ValueOrDie();
+
+  EXPECT_THAT(data, SizeIs(1));
+  EXPECT_EQ(data[0].granularity, DatetimeGranularity::GRANULARITY_SECOND);
+
+  // Meridiem
+  EXPECT_EQ(data[0].datetime_components[0].component_type,
+            DatetimeComponent::ComponentType::MERIDIEM);
+  EXPECT_EQ(data[0].datetime_components[0].value, 0);
+
+  EXPECT_EQ(data[0].datetime_components[1].component_type,
+            DatetimeComponent::ComponentType::SECOND);
+  EXPECT_EQ(data[0].datetime_components[1].component_type,
+            DatetimeComponent::ComponentType::SECOND);
+
+  EXPECT_EQ(data[0].datetime_components[2].component_type,
+            DatetimeComponent::ComponentType::MINUTE);
+  EXPECT_EQ(data[0].datetime_components[2].value, 59);
+
+  EXPECT_EQ(data[0].datetime_components[3].component_type,
+            DatetimeComponent::ComponentType::HOUR);
+  EXPECT_EQ(data[0].datetime_components[3].value, 11);
+
+  EXPECT_EQ(data[0].datetime_components[4].component_type,
+            DatetimeComponent::ComponentType::DAY_OF_MONTH);
+  EXPECT_EQ(data[0].datetime_components[4].value, 30);
+
+  EXPECT_EQ(data[0].datetime_components[5].component_type,
+            DatetimeComponent::ComponentType::MONTH);
+  EXPECT_EQ(data[0].datetime_components[5].value, 3);
+
+  EXPECT_EQ(data[0].datetime_components[6].component_type,
+            DatetimeComponent::ComponentType::YEAR);
+  EXPECT_EQ(data[0].datetime_components[6].value, 2000);
+}
+
+TEST_F(DatetimeGrounderTest, InterpretDatetimeTest) {
+  const OwnedFlatbuffer<UngroundedDatetime, std::string> datetime =
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/03, /*day=*/30,
+                            /*hour=*/11, /*minute=*/59, /*second=*/59,
+                            grammar::datetime::Meridiem_UNKNOWN);
+  const std::vector<DatetimeParseResult> data =
+      datetime_grounder_
+          ->Ground(
+              /*reference_time_ms_utc=*/0, "Europe/Zurich", "en-US",
+              datetime.get())
+          .ValueOrDie();
+
+  EXPECT_THAT(data, SizeIs(2));
+  EXPECT_EQ(data[0].granularity, DatetimeGranularity::GRANULARITY_SECOND);
+  EXPECT_EQ(data[1].granularity, DatetimeGranularity::GRANULARITY_SECOND);
+
+  // Check Meridiem's values
+  EXPECT_EQ(data[0].datetime_components[0].component_type,
+            DatetimeComponent::ComponentType::MERIDIEM);
+  EXPECT_EQ(data[0].datetime_components[0].value, 0);
+  EXPECT_EQ(data[1].datetime_components[0].component_type,
+            DatetimeComponent::ComponentType::MERIDIEM);
+  EXPECT_EQ(data[1].datetime_components[0].value, 1);
+}
+
+TEST_F(DatetimeGrounderTest, RelativeDatetimeTest) {
+  const OwnedFlatbuffer<UngroundedDatetime, std::string> datetime =
+      BuildRelativeDatetime(ComponentType::ComponentType_DAY_OF_MONTH,
+                            Modifier::Modifier_NEXT, 1);
+  const std::vector<DatetimeParseResult> data =
+      datetime_grounder_
+          ->Ground(
+              /*reference_time_ms_utc=*/0, "Europe/Zurich", "en-US",
+              datetime.get())
+          .ValueOrDie();
+
+  EXPECT_THAT(data, SizeIs(1));
+  EXPECT_EQ(data[0].granularity, DatetimeGranularity::GRANULARITY_DAY);
+
+  EXPECT_EQ(data[0].datetime_components[0].component_type,
+            DatetimeComponent::ComponentType::DAY_OF_MONTH);
+  EXPECT_EQ(data[0].datetime_components[0].relative_qualifier,
+            DatetimeComponent::RelativeQualifier::NEXT);
+  EXPECT_EQ(data[0].datetime_components[0].relative_count, 1);
+  EXPECT_EQ(data[0].datetime_components[1].component_type,
+            DatetimeComponent::ComponentType::MONTH);
+  EXPECT_EQ(data[0].datetime_components[2].component_type,
+            DatetimeComponent::ComponentType::YEAR);
+}
+
+TEST_F(DatetimeGrounderTest, TimeZoneTest) {
+  grammar::datetime::UngroundedDatetimeT ungrounded_datetime;
+  ungrounded_datetime.absolute_datetime.reset(new AbsoluteDateTimeT);
+  ungrounded_datetime.absolute_datetime->time_zone.reset(new TimeZoneT);
+  ungrounded_datetime.absolute_datetime->time_zone->utc_offset_mins = 120;
+  const OwnedFlatbuffer<UngroundedDatetime, std::string> timezone(
+      PackFlatbuffer<UngroundedDatetime>(&ungrounded_datetime));
+
+  const std::vector<DatetimeParseResult> data =
+      datetime_grounder_
+          ->Ground(
+              /*reference_time_ms_utc=*/0, "Europe/Zurich", "en-US",
+              timezone.get())
+          .ValueOrDie();
+
+  EXPECT_THAT(data, SizeIs(1));
+  EXPECT_EQ(data[0].granularity, DatetimeGranularity::GRANULARITY_UNKNOWN);
+  EXPECT_EQ(data[0].datetime_components[0].component_type,
+            DatetimeComponent::ComponentType::ZONE_OFFSET);
+  EXPECT_EQ(data[0].datetime_components[0].value, 120);
+}
+
+TEST_F(DatetimeGrounderTest, InValidUngroundedDatetime) {
+  VerifyInValidUngroundedDatetime(
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/23, /*day=*/30,
+                            /*hour=*/11, /*minute=*/59, /*second=*/59,
+                            grammar::datetime::Meridiem_AM)
+          .get());
+
+  VerifyInValidUngroundedDatetime(
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/03, /*day=*/33,
+                            /*hour=*/11, /*minute=*/59, /*second=*/59,
+                            grammar::datetime::Meridiem_AM)
+          .get());
+
+  VerifyInValidUngroundedDatetime(
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/02, /*day=*/30,
+                            /*hour=*/11, /*minute=*/59, /*second=*/59,
+                            grammar::datetime::Meridiem_AM)
+          .get());
+
+  VerifyInValidUngroundedDatetime(
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/07, /*day=*/31,
+                            /*hour=*/24, /*minute=*/59, /*second=*/59,
+                            grammar::datetime::Meridiem_AM)
+          .get());
+
+  VerifyInValidUngroundedDatetime(
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/02, /*day=*/28,
+                            /*hour=*/24, /*minute=*/59, /*second=*/59,
+                            grammar::datetime::Meridiem_AM)
+          .get());
+
+  VerifyInValidUngroundedDatetime(
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/02, /*day=*/28,
+                            /*hour=*/11, /*minute=*/69, /*second=*/59,
+                            grammar::datetime::Meridiem_AM)
+          .get());
+
+  VerifyInValidUngroundedDatetime(
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/02, /*day=*/28,
+                            /*hour=*/11, /*minute=*/59, /*second=*/99,
+                            grammar::datetime::Meridiem_AM)
+          .get());
+
+  VerifyInValidUngroundedDatetime(
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/00, /*day=*/28,
+                            /*hour=*/11, /*minute=*/59, /*second=*/99,
+                            grammar::datetime::Meridiem_AM)
+          .get());
+}
+
+TEST_F(DatetimeGrounderTest, ValidUngroundedDatetime) {
+  VerifyValidUngroundedDatetime(
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/2, /*day=*/29,
+                            /*hour=*/23, /*minute=*/59, /*second=*/59,
+                            grammar::datetime::Meridiem_AM)
+          .get());
+
+  VerifyValidUngroundedDatetime(
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/7, /*day=*/31,
+                            /*hour=*/23, /*minute=*/59, /*second=*/59,
+                            grammar::datetime::Meridiem_AM)
+          .get());
+
+  VerifyValidUngroundedDatetime(
+      BuildAbsoluteDatetime(/*year=*/2000, /*month=*/10, /*day=*/31,
+                            /*hour=*/23, /*minute=*/59, /*second=*/59,
+                            grammar::datetime::Meridiem_AM)
+          .get());
+}
+
+}  // namespace libtextclassifier3
diff --git a/native/annotator/datetime/datetime.fbs b/native/annotator/datetime/datetime.fbs
new file mode 100644
index 0000000..9a96bae
--- /dev/null
+++ b/native/annotator/datetime/datetime.fbs
@@ -0,0 +1,153 @@
+//
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Meridiem field.
+namespace libtextclassifier3.grammar.datetime;
+enum Meridiem : int {
+  UNKNOWN = 0,
+
+  // Ante meridiem: Before noon
+  AM = 1,
+
+  // Post meridiem: After noon
+  PM = 2,
+}
+
+// Enum represents a unit of date and time in the expression.
+// Next field: 10
+namespace libtextclassifier3.grammar.datetime;
+enum ComponentType : int {
+  UNSPECIFIED = 0,
+
+  // Year of the date seen in the text match.
+  YEAR = 1,
+
+  // Month of the year starting with January = 1.
+  MONTH = 2,
+
+  // Week (7 days).
+  WEEK = 3,
+
+  // Day of week, start of the week is Sunday and its value is 1.
+  DAY_OF_WEEK = 4,
+
+  // Day of the month starting with 1.
+  DAY_OF_MONTH = 5,
+
+  // Hour of the day.
+  HOUR = 6,
+
+  // Minute of the hour with a range of 0-59.
+  MINUTE = 7,
+
+  // Seconds of the minute with a range of 0-59.
+  SECOND = 8,
+
+  // Meridiem field i.e. AM/PM.
+  MERIDIEM = 9,
+}
+
+namespace libtextclassifier3.grammar.datetime;
+table TimeZone {
+  // Offset from UTC/GMT in minutes.
+  utc_offset_mins:int;
+}
+
+namespace libtextclassifier3.grammar.datetime.RelativeDatetimeComponent_;
+enum Modifier : int {
+  UNSPECIFIED = 0,
+  NEXT = 1,
+  THIS = 2,
+  LAST = 3,
+  NOW = 4,
+  TOMORROW = 5,
+  YESTERDAY = 6,
+  PAST = 7,
+  FUTURE = 8,
+}
+
+// Message for representing the relative date-time component in date-time
+// expressions.
+// Next field: 4
+namespace libtextclassifier3.grammar.datetime;
+table RelativeDatetimeComponent {
+  component_type:ComponentType = UNSPECIFIED;
+  modifier:RelativeDatetimeComponent_.Modifier = UNSPECIFIED;
+  value:int;
+}
+
+// AbsoluteDateTime represents date-time expressions that are not ambiguous.
+// Next field: 11
+namespace libtextclassifier3.grammar.datetime;
+table AbsoluteDateTime {
+  // Year value of the date seen in the text match.
+  year:int = -1;
+
+  // Month value of the year starting with January = 1.
+  month:int = -1;
+
+  // Day value of the month starting with 1.
+  day:int = -1;
+
+  // Day of week, start of the week is Sunday and its value is 1.
+  week_day:int = -1;
+
+  // Hour value of the day.
+  hour:int = -1;
+
+  // Minute value of the hour with a range of 0-59.
+  minute:int = -1;
+
+  // Seconds value of the minute with a range of 0-59.
+  second:int = -1;
+
+  partial_second:double = -1;
+
+  // Meridiem field i.e. AM/PM.
+  meridiem:Meridiem;
+
+  time_zone:TimeZone;
+}
+
+// Message to represent relative datetime expressions.
+// It encodes expressions
+// - Where modifier such as before/after shift the date e.g.[three days ago],
+// [2 days after March 1st].
+// - When prefix make the expression relative e.g. [next weekend],
+// [last Monday].
+// Next field: 3
+namespace libtextclassifier3.grammar.datetime;
+table RelativeDateTime {
+  relative_datetime_component:[RelativeDatetimeComponent];
+
+  // The base could be an absolute datetime point for example: "March 1", a
+  // relative datetime point, for example: "2 days before March 1"
+  base:AbsoluteDateTime;
+}
+
+// Datetime result.
+namespace libtextclassifier3.grammar.datetime;
+table UngroundedDatetime {
+  absolute_datetime:AbsoluteDateTime;
+  relative_datetime:RelativeDateTime;
+
+  // The annotation usecases.
+  // There are two modes.
+  // 1- SMART - Datetime results which are optimized for smart selection.
+  // 2- RAW - Results are optimized to annotate as much text as possible.
+  annotation_usecases:uint = 4294967295;
+}
+
diff --git a/native/annotator/datetime/extractor.cc b/native/annotator/datetime/extractor.cc
index c42ddf0..867c886 100644
--- a/native/annotator/datetime/extractor.cc
+++ b/native/annotator/datetime/extractor.cc
@@ -16,6 +16,7 @@
 
 #include "annotator/datetime/extractor.h"
 
+#include "annotator/datetime/utils.h"
 #include "annotator/model_generated.h"
 #include "annotator/types.h"
 #include "utils/base/logging.h"
@@ -390,15 +391,7 @@
   if (!ParseDigits(input, parsed_year)) {
     return false;
   }
-
-  // Logic to decide if XX will be 20XX or 19XX
-  if (*parsed_year < 100) {
-    if (*parsed_year < 50) {
-      *parsed_year += 2000;
-    } else {
-      *parsed_year += 1900;
-    }
-  }
+  *parsed_year = GetAdjustedYear(*parsed_year);
 
   return true;
 }
diff --git a/native/annotator/datetime/grammar-parser.cc b/native/annotator/datetime/grammar-parser.cc
new file mode 100644
index 0000000..6d51c19
--- /dev/null
+++ b/native/annotator/datetime/grammar-parser.cc
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "annotator/datetime/grammar-parser.h"
+
+#include <set>
+#include <unordered_set>
+
+#include "annotator/datetime/datetime-grounder.h"
+#include "annotator/types.h"
+#include "utils/grammar/analyzer.h"
+#include "utils/grammar/evaluated-derivation.h"
+#include "utils/grammar/parsing/derivation.h"
+
+using ::libtextclassifier3::grammar::EvaluatedDerivation;
+using ::libtextclassifier3::grammar::datetime::UngroundedDatetime;
+
+namespace libtextclassifier3 {
+
+GrammarDatetimeParser::GrammarDatetimeParser(
+    const grammar::Analyzer& analyzer,
+    const DatetimeGrounder& datetime_grounder,
+    const float target_classification_score, const float priority_score)
+    : analyzer_(analyzer),
+      datetime_grounder_(datetime_grounder),
+      target_classification_score_(target_classification_score),
+      priority_score_(priority_score) {}
+
+StatusOr<std::vector<DatetimeParseResultSpan>> GrammarDatetimeParser::Parse(
+    const std::string& input, const int64 reference_time_ms_utc,
+    const std::string& reference_timezone, const LocaleList& locale_list,
+    ModeFlag mode, AnnotationUsecase annotation_usecase,
+    bool anchor_start_end) const {
+  return Parse(UTF8ToUnicodeText(input, /*do_copy=*/false),
+               reference_time_ms_utc, reference_timezone, locale_list, mode,
+               annotation_usecase, anchor_start_end);
+}
+
+StatusOr<std::vector<DatetimeParseResultSpan>> GrammarDatetimeParser::Parse(
+    const UnicodeText& input, const int64 reference_time_ms_utc,
+    const std::string& reference_timezone, const LocaleList& locale_list,
+    ModeFlag mode, AnnotationUsecase annotation_usecase,
+    bool anchor_start_end) const {
+  std::vector<DatetimeParseResultSpan> results;
+  UnsafeArena arena(/*block_size=*/16 << 10);
+  std::vector<Locale> locales = locale_list.GetLocales();
+  // If the locale list is empty, the datetime regex expressions would still
+  // execute, but in the grammar-based parser the rules are associated with a
+  // locale and the engine will not run when the locale list is empty. In the
+  // unlikely scenario where no locale is mentioned, fall back to en-*.
+  if (locales.empty()) {
+    locales.emplace_back(Locale::FromBCP47("en"));
+  }
+  TC3_ASSIGN_OR_RETURN(
+      const std::vector<EvaluatedDerivation> evaluated_derivations,
+      analyzer_.Parse(input, locales, &arena,
+                      /*deduplicate_derivations=*/false));
+
+  std::vector<EvaluatedDerivation> valid_evaluated_derivations;
+  for (const EvaluatedDerivation& evaluated_derivation :
+       evaluated_derivations) {
+    if (evaluated_derivation.value) {
+      if (evaluated_derivation.value->Has<flatbuffers::Table>()) {
+        const UngroundedDatetime* ungrounded_datetime =
+            evaluated_derivation.value->Table<UngroundedDatetime>();
+        if (datetime_grounder_.IsValidUngroundedDatetime(ungrounded_datetime)) {
+          valid_evaluated_derivations.emplace_back(evaluated_derivation);
+        }
+      }
+    }
+  }
+  valid_evaluated_derivations =
+      grammar::DeduplicateDerivations(valid_evaluated_derivations);
+  for (const EvaluatedDerivation& evaluated_derivation :
+       valid_evaluated_derivations) {
+    if (evaluated_derivation.value) {
+      if (evaluated_derivation.value->Has<flatbuffers::Table>()) {
+        const UngroundedDatetime* ungrounded_datetime =
+            evaluated_derivation.value->Table<UngroundedDatetime>();
+        if ((ungrounded_datetime->annotation_usecases() &
+             (1 << annotation_usecase)) == 0) {
+          continue;
+        }
+        const StatusOr<std::vector<DatetimeParseResult>>&
+            datetime_parse_results = datetime_grounder_.Ground(
+                reference_time_ms_utc, reference_timezone,
+                locale_list.GetReferenceLocale(), ungrounded_datetime);
+        TC3_ASSIGN_OR_RETURN(
+            const std::vector<DatetimeParseResult>& parse_datetime,
+            datetime_parse_results);
+        DatetimeParseResultSpan datetime_parse_result_span;
+        datetime_parse_result_span.target_classification_score =
+            target_classification_score_;
+        datetime_parse_result_span.priority_score = priority_score_;
+        datetime_parse_result_span.data.reserve(parse_datetime.size());
+        datetime_parse_result_span.data.insert(
+            datetime_parse_result_span.data.end(), parse_datetime.begin(),
+            parse_datetime.end());
+        datetime_parse_result_span.span =
+            evaluated_derivation.parse_tree->codepoint_span;
+
+        results.emplace_back(datetime_parse_result_span);
+      }
+    }
+  }
+  return results;
+}
+}  // namespace libtextclassifier3
diff --git a/native/annotator/datetime/grammar-parser.h b/native/annotator/datetime/grammar-parser.h
new file mode 100644
index 0000000..6ff4b46
--- /dev/null
+++ b/native/annotator/datetime/grammar-parser.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_ANNOTATOR_DATETIME_GRAMMAR_PARSER_H_
+#define LIBTEXTCLASSIFIER_ANNOTATOR_DATETIME_GRAMMAR_PARSER_H_
+
+#include <string>
+#include <vector>
+
+#include "annotator/datetime/datetime-grounder.h"
+#include "annotator/datetime/parser.h"
+#include "annotator/types.h"
+#include "utils/base/statusor.h"
+#include "utils/grammar/analyzer.h"
+#include "utils/i18n/locale-list.h"
+#include "utils/utf8/unicodetext.h"
+
+namespace libtextclassifier3 {
+
+// Parses datetime expressions in the input and resolves them to actual absolute
+// time.
+class GrammarDatetimeParser : public DatetimeParser {
+ public:
+  explicit GrammarDatetimeParser(const grammar::Analyzer& analyzer,
+                                 const DatetimeGrounder& datetime_grounder,
+                                 const float target_classification_score,
+                                 const float priority_score);
+
+  // Parses the dates in 'input' and fills result. Makes sure that the results
+  // do not overlap.
+  // If 'anchor_start_end' is true the extracted results need to start at the
+  // beginning of 'input' and end at the end of it.
+  StatusOr<std::vector<DatetimeParseResultSpan>> Parse(
+      const std::string& input, int64 reference_time_ms_utc,
+      const std::string& reference_timezone, const LocaleList& locale_list,
+      ModeFlag mode, AnnotationUsecase annotation_usecase,
+      bool anchor_start_end) const override;
+
+  // Same as above but takes UnicodeText.
+  StatusOr<std::vector<DatetimeParseResultSpan>> Parse(
+      const UnicodeText& input, int64 reference_time_ms_utc,
+      const std::string& reference_timezone, const LocaleList& locale_list,
+      ModeFlag mode, AnnotationUsecase annotation_usecase,
+      bool anchor_start_end) const override;
+
+ private:
+  const grammar::Analyzer& analyzer_;
+  const DatetimeGrounder& datetime_grounder_;
+  const float target_classification_score_;
+  const float priority_score_;
+};
+
+}  // namespace libtextclassifier3
+
+#endif  // LIBTEXTCLASSIFIER_ANNOTATOR_DATETIME_GRAMMAR_PARSER_H_
diff --git a/native/annotator/datetime/grammar-parser_test.cc b/native/annotator/datetime/grammar-parser_test.cc
new file mode 100644
index 0000000..cf2dffd
--- /dev/null
+++ b/native/annotator/datetime/grammar-parser_test.cc
@@ -0,0 +1,554 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "annotator/datetime/grammar-parser.h"
+
+#include <memory>
+#include <string>
+
+#include "annotator/datetime/datetime-grounder.h"
+#include "annotator/datetime/testing/base-parser-test.h"
+#include "annotator/datetime/testing/datetime-component-builder.h"
+#include "utils/grammar/analyzer.h"
+#include "utils/jvm-test-utils.h"
+#include "utils/test-data-test-utils.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+
+using ::libtextclassifier3::grammar::Analyzer;
+using ::libtextclassifier3::grammar::RulesSet;
+
+namespace libtextclassifier3 {
+namespace {
+std::string GetModelPath() { return GetTestDataPath("annotator/test_data/"); }
+
+std::string ReadFile(const std::string& file_name) {
+  std::ifstream file_stream(file_name);
+  return std::string(std::istreambuf_iterator<char>(file_stream), {});
+}
+
+class GrammarDatetimeParserTest : public DateTimeParserTest {
+ public:
+  void SetUp() override {
+    grammar_buffer_ = ReadFile(GetModelPath() + "datetime.fb");
+    unilib_ = CreateUniLibForTesting();
+    calendarlib_ = CreateCalendarLibForTesting();
+    analyzer_ = std::make_unique<Analyzer>(
+        unilib_.get(), flatbuffers::GetRoot<RulesSet>(grammar_buffer_.data()));
+    datetime_grounder_ = std::make_unique<DatetimeGrounder>(calendarlib_.get());
+    parser_.reset(new GrammarDatetimeParser(*analyzer_, *datetime_grounder_,
+                                            /*target_classification_score=*/1.0,
+                                            /*priority_score=*/1.0));
+  }
+
+  // Exposes the datetime parser for tests and evaluations.
+  const DatetimeParser* DatetimeParserForTests() const override {
+    return parser_.get();
+  }
+
+ private:
+  std::string grammar_buffer_;
+  std::unique_ptr<UniLib> unilib_;
+  std::unique_ptr<CalendarLib> calendarlib_;
+  std::unique_ptr<Analyzer> analyzer_;
+  std::unique_ptr<DatetimeGrounder> datetime_grounder_;
+  std::unique_ptr<DatetimeParser> parser_;
+};
+
+TEST_F(GrammarDatetimeParserTest, ParseShort) {
+  EXPECT_TRUE(ParsesCorrectly(
+      "{01/02/2020}", 1580511600000, GRANULARITY_DAY,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::MONTH, 2)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2020)
+           .Build()},
+      /*anchor_start_end=*/false,
+      /*timezone=*/"Europe/Zurich",
+      /*locales=*/"en-GB"));
+
+  EXPECT_TRUE(ParsesCorrectly(
+      "{01/02/2020}", 1577919600000, GRANULARITY_DAY,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 2)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2020)
+           .Build()}));
+
+  EXPECT_TRUE(ParsesCorrectly(
+      "{January 1, 1988}", 567990000000, GRANULARITY_DAY,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 1988)
+           .Build()}));
+}
+
+TEST_F(GrammarDatetimeParserTest, Parse) {
+  EXPECT_TRUE(ParsesCorrectly(
+      "{January 1, 1988}", 567990000000, GRANULARITY_DAY,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 1988)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{january 31 2018}", 1517353200000, GRANULARITY_DAY,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 31)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2018)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "foo {1 january 2018} bar", 1514761200000, GRANULARITY_DAY,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2018)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{09/Mar/2004 22:02:40}", 1078866160000, GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::SECOND, 40)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 02)
+           .Add(DatetimeComponent::ComponentType::HOUR, 22)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 9)
+           .Add(DatetimeComponent::ComponentType::MONTH, 3)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2004)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{Dec 2, 2010 2:39:58 AM}", 1291253998000, GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::SECOND, 58)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 39)
+           .Add(DatetimeComponent::ComponentType::HOUR, 2)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 2)
+           .Add(DatetimeComponent::ComponentType::MONTH, 12)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2010)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{Jun 09 2011 15:28:14}", 1307626094000, GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::SECOND, 14)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 28)
+           .Add(DatetimeComponent::ComponentType::HOUR, 15)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 9)
+           .Add(DatetimeComponent::ComponentType::MONTH, 6)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2011)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{2010-06-26 02:31:29}", {1277512289000, 1277555489000},
+      GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::SECOND, 29)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 31)
+           .Add(DatetimeComponent::ComponentType::HOUR, 2)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 26)
+           .Add(DatetimeComponent::ComponentType::MONTH, 6)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2010)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::SECOND, 29)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 31)
+           .Add(DatetimeComponent::ComponentType::HOUR, 2)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 26)
+           .Add(DatetimeComponent::ComponentType::MONTH, 6)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2010)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{2006/01/22 04:11:05}", {1137899465000, 1137942665000},
+      GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::SECOND, 5)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 11)
+           .Add(DatetimeComponent::ComponentType::HOUR, 4)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 22)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2006)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::SECOND, 5)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 11)
+           .Add(DatetimeComponent::ComponentType::HOUR, 4)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 22)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2006)
+           .Build()}));
+  EXPECT_TRUE(
+      ParsesCorrectly("{11:42:35}", {38555000, 81755000}, GRANULARITY_SECOND,
+                      {DatetimeComponentsBuilder()
+                           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+                           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+                           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+                           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+                           .Build(),
+                       DatetimeComponentsBuilder()
+                           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+                           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+                           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+                           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+                           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{23/Apr 11:42:35}", {9715355000, 9758555000}, GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{23/Apr/2015 11:42:35}", {1429782155000, 1429825355000},
+      GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2015)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2015)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{23-Apr-2015 11:42:35}", {1429782155000, 1429825355000},
+      GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2015)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2015)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{23 Apr 2015 11:42:35}", {1429782155000, 1429825355000},
+      GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2015)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2015)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "Are sentiments apartments decisively the especially alteration. "
+      "Thrown shy denote ten ladies though ask saw. Or by to he going "
+      "think order event music. Incommode so intention defective at "
+      "convinced. Led income months itself and houses you. After nor "
+      "you leave might share court balls. {19/apr/2010 06:36:15} Are "
+      "sentiments apartments decisively the especially alteration. "
+      "Thrown shy denote ten ladies though ask saw. Or by to he going "
+      "think order event music. Incommode so intention defective at "
+      "convinced. Led income months itself and houses you. After nor "
+      "you leave might share court balls. ",
+      {1271651775000, 1271694975000}, GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::SECOND, 15)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 36)
+           .Add(DatetimeComponent::ComponentType::HOUR, 6)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 19)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2010)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::SECOND, 15)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 36)
+           .Add(DatetimeComponent::ComponentType::HOUR, 6)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 19)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2010)
+           .Build()}));
+
+  EXPECT_TRUE(ParsesCorrectly(
+      "{january 1 2018 at 4:30}", {1514777400000, 1514820600000},
+      GRANULARITY_MINUTE,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 30)
+           .Add(DatetimeComponent::ComponentType::HOUR, 4)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2018)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 30)
+           .Add(DatetimeComponent::ComponentType::HOUR, 4)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2018)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{january 1 2018 at 4:30 am}", 1514777400000, GRANULARITY_MINUTE,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 30)
+           .Add(DatetimeComponent::ComponentType::HOUR, 4)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2018)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{january 1 2018 at 4pm}", 1514818800000, GRANULARITY_HOUR,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::HOUR, 4)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2018)
+           .Build()}));
+
+  EXPECT_TRUE(ParsesCorrectly(
+      "{today at 0:00}", {-3600000, 39600000}, GRANULARITY_MINUTE,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 0)
+           .Add(DatetimeComponent::ComponentType::HOUR, 0)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 0,
+                DatetimeComponent::RelativeQualifier::NOW, 0)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 0)
+           .Add(DatetimeComponent::ComponentType::HOUR, 0)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 0,
+                DatetimeComponent::RelativeQualifier::NOW, 0)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{today at 0:00}", {-57600000, -14400000}, GRANULARITY_MINUTE,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 0)
+           .Add(DatetimeComponent::ComponentType::HOUR, 0)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 0,
+                DatetimeComponent::RelativeQualifier::NOW, 0)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 0)
+           .Add(DatetimeComponent::ComponentType::HOUR, 0)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 0,
+                DatetimeComponent::RelativeQualifier::NOW, 0)
+           .Build()},
+      /*anchor_start_end=*/false, "America/Los_Angeles"));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{tomorrow at 4:00}", {97200000, 140400000}, GRANULARITY_MINUTE,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 0)
+           .Add(DatetimeComponent::ComponentType::HOUR, 4)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 0,
+                DatetimeComponent::RelativeQualifier::TOMORROW, 1)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 0)
+           .Add(DatetimeComponent::ComponentType::HOUR, 4)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 0,
+                DatetimeComponent::RelativeQualifier::TOMORROW, 1)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{tomorrow at 4am}", 97200000, GRANULARITY_HOUR,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::HOUR, 4)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 0,
+                DatetimeComponent::RelativeQualifier::TOMORROW, 1)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "last seen {today at 9:01 PM}", 72060000, GRANULARITY_MINUTE,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 1)
+           .Add(DatetimeComponent::ComponentType::HOUR, 9)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 0,
+                DatetimeComponent::RelativeQualifier::NOW, 0)
+           .Build()}));
+  EXPECT_TRUE(
+      ParsesCorrectly("set an alarm for {7 a.m}", 21600000, GRANULARITY_HOUR,
+                      {DatetimeComponentsBuilder()
+                           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+                           .Add(DatetimeComponent::ComponentType::HOUR, 7)
+                           .Build()}));
+
+  EXPECT_TRUE(ParsesCorrectly(
+      "{04/23/15 11:42:35}", {1429782155000, 1429825355000}, GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2015)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2015)
+           .Build()}));
+
+  EXPECT_TRUE(ParsesCorrectly(
+      "{04/23/2015 11:42:35}", {1429782155000, 1429825355000},
+      GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2015)
+           .Build(),
+       DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::SECOND, 35)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 42)
+           .Add(DatetimeComponent::ComponentType::HOUR, 11)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 23)
+           .Add(DatetimeComponent::ComponentType::MONTH, 4)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2015)
+           .Build()}));
+
+  EXPECT_TRUE(ParsesCorrectly(
+      "{9/28/2011 2:23:15 PM}", 1317212595000, GRANULARITY_SECOND,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 1)
+           .Add(DatetimeComponent::ComponentType::SECOND, 15)
+           .Add(DatetimeComponent::ComponentType::MINUTE, 23)
+           .Add(DatetimeComponent::ComponentType::HOUR, 2)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 28)
+           .Add(DatetimeComponent::ComponentType::MONTH, 9)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2011)
+           .Build()}));
+}
+
+TEST_F(GrammarDatetimeParserTest, DateValidation) {
+  EXPECT_TRUE(ParsesCorrectly(
+      "{01/02/2020}", 1577919600000, GRANULARITY_DAY,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 2)
+           .Add(DatetimeComponent::ComponentType::MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2020)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{21/02/2020}", 1582239600000, GRANULARITY_DAY,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 21)
+           .Add(DatetimeComponent::ComponentType::MONTH, 2)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2020)
+           .Build()}));
+}
+
+TEST_F(GrammarDatetimeParserTest, OnlyRelativeDatetime) {
+  EXPECT_TRUE(
+      ParsesCorrectly("{in 3 hours}", 10800000, GRANULARITY_HOUR,
+                      {DatetimeComponentsBuilder()
+                           .Add(DatetimeComponent::ComponentType::HOUR, 0,
+                                DatetimeComponent::RelativeQualifier::FUTURE, 3)
+                           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "{wednesday at 4am}", 529200000, GRANULARITY_HOUR,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::HOUR, 4)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_WEEK, 4,
+                DatetimeComponent::RelativeQualifier::THIS, 0)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "set an alarm for {7am tomorrow}", 108000000, GRANULARITY_HOUR,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::MERIDIEM, 0)
+           .Add(DatetimeComponent::ComponentType::HOUR, 7)
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 0,
+                DatetimeComponent::RelativeQualifier::TOMORROW, 1)
+           .Build()}));
+  EXPECT_TRUE(ParsesCorrectly(
+      "called you {last Saturday}",
+      -432000000 /* Fri 1969-12-26 16:00:00 PST */, GRANULARITY_DAY,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::DAY_OF_WEEK, 7,
+                DatetimeComponent::RelativeQualifier::PAST, -1)
+           .Build()},
+      /*anchor_start_end=*/false,
+      /*timezone=*/"Europe/Zurich", /*locales=*/"en-US",
+      /*annotation_usecase=*/AnnotationUsecase_ANNOTATION_USECASE_RAW));
+}
+
+TEST_F(GrammarDatetimeParserTest, NamedMonthDate) {
+  EXPECT_TRUE(ParsesCorrectly(
+      "{march 1, 2017}", 1488355200000, GRANULARITY_DAY,
+      {DatetimeComponentsBuilder()
+           .Add(DatetimeComponent::ComponentType::DAY_OF_MONTH, 1)
+           .Add(DatetimeComponent::ComponentType::MONTH, 3)
+           .Add(DatetimeComponent::ComponentType::YEAR, 2017)
+           .Build()},
+      false, "America/Los_Angeles", "en-US",
+      AnnotationUsecase_ANNOTATION_USECASE_SMART));
+}
+
+}  // namespace
+}  // namespace libtextclassifier3
diff --git a/native/annotator/datetime/regex-parser_test.cc b/native/annotator/datetime/regex-parser_test.cc
index a0d9adf..33f14a4 100644
--- a/native/annotator/datetime/regex-parser_test.cc
+++ b/native/annotator/datetime/regex-parser_test.cc
@@ -51,7 +51,8 @@
   void SetUp() override {
     // Loads default unmodified model. Individual tests can call LoadModel to
     // make changes.
-    LoadModel([](ModelT* model) {});
+    LoadModel(
+        [](ModelT* model) { model->datetime_grammar_model.reset(nullptr); });
   }
 
   template <typename Fn>
@@ -693,6 +694,7 @@
     // In the test model, the prefer_future_for_unspecified_date is true; make
     // it false only for this test.
     model->datetime_model->prefer_future_for_unspecified_date = false;
+    model->datetime_grammar_model.reset(nullptr);
   });
 
   EXPECT_TRUE(ParsesCorrectly(
@@ -1265,6 +1267,7 @@
   LoadModel([](ModelT* model) {
     model->datetime_model->generate_alternative_interpretations_when_ambiguous =
         false;
+    model->datetime_grammar_model.reset(nullptr);
   });
 
   EXPECT_TRUE(ParsesCorrectly(
diff --git a/native/annotator/datetime/utils.cc b/native/annotator/datetime/utils.cc
index 30a99a1..d772809 100644
--- a/native/annotator/datetime/utils.cc
+++ b/native/annotator/datetime/utils.cc
@@ -64,4 +64,15 @@
   }
 }
 
+int GetAdjustedYear(const int parsed_year) {
+  if (parsed_year < 100) {
+    if (parsed_year < 50) {
+      return parsed_year + 2000;
+    } else {
+      return parsed_year + 1900;
+    }
+  }
+  return parsed_year;
+}
+
 }  // namespace libtextclassifier3
diff --git a/native/annotator/datetime/utils.h b/native/annotator/datetime/utils.h
index cdf1c8b..297ed1d 100644
--- a/native/annotator/datetime/utils.h
+++ b/native/annotator/datetime/utils.h
@@ -30,6 +30,8 @@
                          const DatetimeGranularity& granularity,
                          std::vector<DatetimeParsedData>* interpretations);
 
+// Maps a two-digit year XX to a full year: XX < 50 becomes 20XX, 50-99 becomes 19XX; years >= 100 are returned unchanged.
+int GetAdjustedYear(const int parsed_year);
 }  // namespace libtextclassifier3
 
 #endif  // LIBTEXTCLASSIFIER_ANNOTATOR_DATETIME_UTILS_H_
diff --git a/native/annotator/entity-data.fbs b/native/annotator/entity-data.fbs
old mode 100755
new mode 100644
diff --git a/native/annotator/experimental/experimental.fbs b/native/annotator/experimental/experimental.fbs
old mode 100755
new mode 100644
diff --git a/native/annotator/feature-processor.cc b/native/annotator/feature-processor.cc
index 99e25e1..93c3636 100644
--- a/native/annotator/feature-processor.cc
+++ b/native/annotator/feature-processor.cc
@@ -141,26 +141,23 @@
     std::vector<Token>* tokens) const {
   const UnicodeText context_unicode = UTF8ToUnicodeText(context,
                                                         /*do_copy=*/false);
-  StripTokensFromOtherLines(context_unicode, span, tokens);
+  const auto [span_begin, span_end] =
+      CodepointSpanToUnicodeTextRange(context_unicode, span);
+  StripTokensFromOtherLines(context_unicode, span_begin, span_end, span,
+                            tokens);
 }
 
 void FeatureProcessor::StripTokensFromOtherLines(
-    const UnicodeText& context_unicode, const CodepointSpan& span,
+    const UnicodeText& context_unicode,
+    const UnicodeText::const_iterator& span_begin,
+    const UnicodeText::const_iterator& span_end, const CodepointSpan& span,
     std::vector<Token>* tokens) const {
   std::vector<UnicodeTextRange> lines =
       SplitContext(context_unicode, options_->use_pipe_character_for_newline());
 
-  auto span_start = context_unicode.begin();
-  if (span.first > 0) {
-    std::advance(span_start, span.first);
-  }
-  auto span_end = context_unicode.begin();
-  if (span.second > 0) {
-    std::advance(span_end, span.second);
-  }
   for (const UnicodeTextRange& line : lines) {
     // Find the line that completely contains the span.
-    if (line.first <= span_start && line.second >= span_end) {
+    if (line.first <= span_begin && line.second >= span_end) {
       const CodepointIndex last_line_begin_index =
           std::distance(context_unicode.begin(), line.first);
       const CodepointIndex last_line_end_index =
@@ -365,6 +362,19 @@
           selectable_tokens[token_span.second - 1].end};
 }
 
+UnicodeTextRange CodepointSpanToUnicodeTextRange(
+    const UnicodeText& unicode_text, const CodepointSpan& span) {
+  auto begin = unicode_text.begin();
+  if (span.first > 0) {
+    std::advance(begin, span.first);
+  }
+  auto end = unicode_text.begin();
+  if (span.second > 0) {
+    std::advance(end, span.second);
+  }
+  return {begin, end};
+}
+
 namespace {
 
 // Finds a single token that completely contains the given span.
@@ -581,10 +591,8 @@
     return span;
   }
 
-  UnicodeText::const_iterator span_begin = context_unicode.begin();
-  std::advance(span_begin, span.first);
-  UnicodeText::const_iterator span_end = context_unicode.begin();
-  std::advance(span_end, span.second);
+  const auto [span_begin, span_end] =
+      CodepointSpanToUnicodeTextRange(context_unicode, span);
 
   return StripBoundaryCodepoints(span_begin, span_end, span);
 }
@@ -692,14 +700,18 @@
                                               int* click_pos) const {
   const UnicodeText context_unicode =
       UTF8ToUnicodeText(context, /*do_copy=*/false);
-  RetokenizeAndFindClick(context_unicode, input_span, only_use_line_with_click,
-                         tokens, click_pos);
+  const auto [span_begin, span_end] =
+      CodepointSpanToUnicodeTextRange(context_unicode, input_span);
+  RetokenizeAndFindClick(context_unicode, span_begin, span_end, input_span,
+                         only_use_line_with_click, tokens, click_pos);
 }
 
 void FeatureProcessor::RetokenizeAndFindClick(
-    const UnicodeText& context_unicode, const CodepointSpan& input_span,
-    bool only_use_line_with_click, std::vector<Token>* tokens,
-    int* click_pos) const {
+    const UnicodeText& context_unicode,
+    const UnicodeText::const_iterator& span_begin,
+    const UnicodeText::const_iterator& span_end,
+    const CodepointSpan& input_span, bool only_use_line_with_click,
+    std::vector<Token>* tokens, int* click_pos) const {
   TC3_CHECK(tokens != nullptr);
 
   if (options_->split_tokens_on_selection_boundaries()) {
@@ -707,7 +719,8 @@
   }
 
   if (only_use_line_with_click) {
-    StripTokensFromOtherLines(context_unicode, input_span, tokens);
+    StripTokensFromOtherLines(context_unicode, span_begin, span_end, input_span,
+                              tokens);
   }
 
   int local_click_pos;
diff --git a/native/annotator/feature-processor.h b/native/annotator/feature-processor.h
index 482d274..554727a 100644
--- a/native/annotator/feature-processor.h
+++ b/native/annotator/feature-processor.h
@@ -83,6 +83,18 @@
 CodepointSpan TokenSpanToCodepointSpan(
     const std::vector<Token>& selectable_tokens, const TokenSpan& token_span);
 
+// Converts a codepoint span to a unicode text range, within the given unicode
+// text.
+// For an invalid span (with a negative index), returns (begin, begin). This
+// means that it is safe to call this function before checking the validity of
+// the span.
+// The indices must fit within the unicode text.
+// Note that the execution time is linear with respect to the codepoint indices.
+// Calling this function repeatedly for spans on the same text might lead to
+// inefficient code.
+UnicodeTextRange CodepointSpanToUnicodeTextRange(
+    const UnicodeText& unicode_text, const CodepointSpan& span);
+
 // Takes care of preparing features for the span prediction model.
 class FeatureProcessor {
  public:
@@ -138,8 +150,11 @@
                               bool only_use_line_with_click,
                               std::vector<Token>* tokens, int* click_pos) const;
 
-  // Same as above but takes UnicodeText.
+  // Same as above, but takes UnicodeText and iterators within it corresponding
+  // to input_span.
   void RetokenizeAndFindClick(const UnicodeText& context_unicode,
+                              const UnicodeText::const_iterator& span_begin,
+                              const UnicodeText::const_iterator& span_end,
                               const CodepointSpan& input_span,
                               bool only_use_line_with_click,
                               std::vector<Token>* tokens, int* click_pos) const;
@@ -261,6 +276,8 @@
 
   // Same as above but takes UnicodeText.
   void StripTokensFromOtherLines(const UnicodeText& context_unicode,
+                                 const UnicodeText::const_iterator& span_begin,
+                                 const UnicodeText::const_iterator& span_end,
                                  const CodepointSpan& span,
                                  std::vector<Token>* tokens) const;
 
diff --git a/native/annotator/knowledge/knowledge-engine-dummy.h b/native/annotator/knowledge/knowledge-engine-dummy.h
index b6c4f42..34fa490 100644
--- a/native/annotator/knowledge/knowledge-engine-dummy.h
+++ b/native/annotator/knowledge/knowledge-engine-dummy.h
@@ -37,19 +37,19 @@
 
   void SetPriorityScore(float priority_score) {}
 
-  bool ClassifyText(const std::string& text, CodepointSpan selection_indices,
-                    AnnotationUsecase annotation_usecase,
-                    const Optional<LocationContext>& location_context,
-                    const Permissions& permissions,
-                    ClassificationResult* classification_result) const {
-    return false;
+  Status ClassifyText(const std::string& text, CodepointSpan selection_indices,
+                      AnnotationUsecase annotation_usecase,
+                      const Optional<LocationContext>& location_context,
+                      const Permissions& permissions,
+                      ClassificationResult* classification_result) const {
+    return Status(StatusCode::UNIMPLEMENTED, "Not implemented.");
   }
 
-  bool Chunk(const std::string& text, AnnotationUsecase annotation_usecase,
-             const Optional<LocationContext>& location_context,
-             const Permissions& permissions, const AnnotateMode annotate_mode,
-             Annotations* result) const {
-    return true;
+  Status Chunk(const std::string& text, AnnotationUsecase annotation_usecase,
+               const Optional<LocationContext>& location_context,
+               const Permissions& permissions, const AnnotateMode annotate_mode,
+               Annotations* result) const {
+    return Status::OK;
   }
 
   Status ChunkMultipleSpans(
@@ -62,9 +62,8 @@
     return Status::OK;
   }
 
-  bool LookUpEntity(const std::string& id,
-                    std::string* serialized_knowledge_result) const {
-    return false;
+  StatusOr<std::string> LookUpEntity(const std::string& id) const {
+    return Status(StatusCode::UNIMPLEMENTED, "Not implemented.");
   }
 
   StatusOr<std::string> LookUpEntityProperty(
diff --git a/native/annotator/model.fbs b/native/annotator/model.fbs
old mode 100755
new mode 100644
index f639f06..57187f5
--- a/native/annotator/model.fbs
+++ b/native/annotator/model.fbs
@@ -426,6 +426,12 @@
 
   // Grammar specific tokenizer options.
   tokenizer_options:GrammarTokenizerOptions;
+
+  // The score.
+  target_classification_score:float = 1;
+
+  // The priority score used for conflict resolution with the other models.
+  priority_score:float = 1;
 }
 
 namespace libtextclassifier3.MoneyParsingOptions_;
@@ -596,6 +602,7 @@
   experimental_model:ExperimentalModel;
   pod_ner_model:PodNerModel;
   vocab_model:VocabModel;
+  datetime_grammar_model:GrammarModel;
 }
 
 // Method for selecting the center token.
diff --git a/native/annotator/person_name/person_name_model.fbs b/native/annotator/person_name/person_name_model.fbs
old mode 100755
new mode 100644
diff --git a/native/annotator/pod_ner/pod-ner-dummy.h b/native/annotator/pod_ner/pod-ner-dummy.h
deleted file mode 100644
index c2ee00f..0000000
--- a/native/annotator/pod_ner/pod-ner-dummy.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_POD_NER_DUMMY_H_
-#define LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_POD_NER_DUMMY_H_
-
-#include <memory>
-
-#include "annotator/model_generated.h"
-#include "annotator/types.h"
-#include "utils/utf8/unicodetext.h"
-#include "utils/utf8/unilib.h"
-
-namespace libtextclassifier3 {
-
-// Dummy version of POD NER annotator. To be included in builds that do not
-// want POD NER support.
-class PodNerAnnotator {
- public:
-  static std::unique_ptr<PodNerAnnotator> Create(const PodNerModel *model,
-                                                 const UniLib &unilib) {
-    return nullptr;
-  }
-
-  bool Annotate(const UnicodeText &context,
-                std::vector<AnnotatedSpan> *results) const {
-    return true;
-  }
-
-  bool SuggestSelection(const UnicodeText &context, CodepointSpan click,
-                        AnnotatedSpan *result) const {
-    return {};
-  }
-
-  bool ClassifyText(const UnicodeText &context, CodepointSpan click,
-                    ClassificationResult *result) const {
-    return false;
-  }
-
-  std::vector<std::string> GetSupportedCollections() const { return {}; }
-};
-
-}  // namespace libtextclassifier3
-
-#endif  // LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_POD_NER_DUMMY_H_
diff --git a/native/annotator/pod_ner/pod-ner-impl.cc b/native/annotator/pod_ner/pod-ner-impl.cc
new file mode 100644
index 0000000..666b7c7
--- /dev/null
+++ b/native/annotator/pod_ner/pod-ner-impl.cc
@@ -0,0 +1,520 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "annotator/pod_ner/pod-ner-impl.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <ctime>
+#include <iostream>
+#include <memory>
+#include <ostream>
+#include <unordered_set>
+#include <vector>
+
+#include "annotator/model_generated.h"
+#include "annotator/pod_ner/utils.h"
+#include "annotator/types.h"
+#include "utils/base/logging.h"
+#include "utils/bert_tokenizer.h"
+#include "utils/tflite-model-executor.h"
+#include "utils/tokenizer-utils.h"
+#include "utils/utf8/unicodetext.h"
+#include "absl/strings/ascii.h"
+#include "tensorflow/lite/kernels/builtin_op_kernels.h"
+#include "tensorflow/lite/mutable_op_resolver.h"
+#include "tensorflow_lite_support/cc/text/tokenizers/tokenizer.h"
+#include "tensorflow_models/seq_flow_lite/tflite_ops/layer_norm.h"
+#include "tensorflow_models/seq_flow_lite/tflite_ops/quantization_util.h"
+
+namespace libtextclassifier3 {
+
+using PodNerModel_::CollectionT;
+using PodNerModel_::LabelT;
+using ::tflite::support::text::tokenizer::TokenizerResult;
+
+namespace {
+
+using PodNerModel_::Label_::BoiseType;
+using PodNerModel_::Label_::BoiseType_BEGIN;
+using PodNerModel_::Label_::BoiseType_END;
+using PodNerModel_::Label_::BoiseType_INTERMEDIATE;
+using PodNerModel_::Label_::BoiseType_O;
+using PodNerModel_::Label_::BoiseType_SINGLE;
+using PodNerModel_::Label_::MentionType;
+using PodNerModel_::Label_::MentionType_NAM;
+using PodNerModel_::Label_::MentionType_NOM;
+using PodNerModel_::Label_::MentionType_UNDEFINED;
+
+void EmplaceToLabelVector(BoiseType boise_type, MentionType mention_type,
+                          int collection_id, std::vector<LabelT> *labels) {
+  labels->emplace_back();
+  labels->back().boise_type = boise_type;
+  labels->back().mention_type = mention_type;
+  labels->back().collection_id = collection_id;
+}
+
+void FillDefaultLabelsAndCollections(float default_priority,
+                                     std::vector<LabelT> *labels,
+                                     std::vector<CollectionT> *collections) {
+  std::vector<std::string> collection_names = {
+      "art",          "consumer_good", "event",  "location",
+      "organization", "ner_entity",    "person", "undefined"};
+  collections->clear();
+  for (const std::string &collection_name : collection_names) {
+    collections->emplace_back();
+    collections->back().name = collection_name;
+    collections->back().single_token_priority_score = default_priority;
+    collections->back().multi_token_priority_score = default_priority;
+  }
+
+  labels->clear();
+  for (auto boise_type :
+       {BoiseType_BEGIN, BoiseType_END, BoiseType_INTERMEDIATE}) {
+    for (auto mention_type : {MentionType_NAM, MentionType_NOM}) {
+      for (int i = 0; i < collections->size() - 1; ++i) {  // skip undefined
+        EmplaceToLabelVector(boise_type, mention_type, i, labels);
+      }
+    }
+  }
+  EmplaceToLabelVector(BoiseType_O, MentionType_UNDEFINED, 7, labels);
+  for (auto mention_type : {MentionType_NAM, MentionType_NOM}) {
+    for (int i = 0; i < collections->size() - 1; ++i) {  // skip undefined
+      EmplaceToLabelVector(BoiseType_SINGLE, mention_type, i, labels);
+    }
+  }
+}
+
+std::unique_ptr<tflite::Interpreter> CreateInterpreter(
+    const PodNerModel *model) {
+  TC3_CHECK(model != nullptr);
+  if (model->tflite_model() == nullptr) {
+    TC3_LOG(ERROR) << "Unable to create tf.lite interpreter, model is null.";
+    return nullptr;
+  }
+
+  const tflite::Model *tflite_model =
+      tflite::GetModel(model->tflite_model()->Data());
+  if (tflite_model == nullptr) {
+    TC3_LOG(ERROR) << "Unable to create tf.lite interpreter, model is null.";
+    return nullptr;
+  }
+
+  std::unique_ptr<tflite::OpResolver> resolver =
+      BuildOpResolver([](tflite::MutableOpResolver *mutable_resolver) {
+        mutable_resolver->AddBuiltin(::tflite::BuiltinOperator_SHAPE,
+                                     ::tflite::ops::builtin::Register_SHAPE());
+        mutable_resolver->AddBuiltin(::tflite::BuiltinOperator_RANGE,
+                                     ::tflite::ops::builtin::Register_RANGE());
+        mutable_resolver->AddBuiltin(
+            ::tflite::BuiltinOperator_ARG_MAX,
+            ::tflite::ops::builtin::Register_ARG_MAX());
+        mutable_resolver->AddBuiltin(
+            ::tflite::BuiltinOperator_EXPAND_DIMS,
+            ::tflite::ops::builtin::Register_EXPAND_DIMS());
+        mutable_resolver->AddCustom(
+            "LayerNorm", ::seq_flow_lite::ops::custom::Register_LAYER_NORM());
+      });
+
+  std::unique_ptr<tflite::Interpreter> tflite_interpreter;
+  tflite::InterpreterBuilder(tflite_model, *resolver,
+                             nullptr)(&tflite_interpreter);
+  if (tflite_interpreter == nullptr) {
+    TC3_LOG(ERROR) << "Unable to create tf.lite interpreter.";
+    return nullptr;
+  }
+  return tflite_interpreter;
+}
+
+bool FindSpecialWordpieceIds(const std::unique_ptr<BertTokenizer> &tokenizer,
+                             int *cls_id, int *sep_id, int *period_id,
+                             int *unknown_id) {
+  if (!tokenizer->LookupId("[CLS]", cls_id)) {
+    TC3_LOG(ERROR) << "Couldn't find [CLS] wordpiece.";
+    return false;
+  }
+  if (!tokenizer->LookupId("[SEP]", sep_id)) {
+    TC3_LOG(ERROR) << "Couldn't find [SEP] wordpiece.";
+    return false;
+  }
+  if (!tokenizer->LookupId(".", period_id)) {
+    TC3_LOG(ERROR) << "Couldn't find [.] wordpiece.";
+    return false;
+  }
+  if (!tokenizer->LookupId("[UNK]", unknown_id)) {
+    TC3_LOG(ERROR) << "Couldn't find [UNK] wordpiece.";
+    return false;
+  }
+  return true;
+}
+// WARNING: This tokenizer is not exactly the one the model was trained with
+// so there might be nuances.
+std::unique_ptr<BertTokenizer> CreateTokenizer(const PodNerModel *model) {
+  TC3_CHECK(model != nullptr);
+  if (model->word_piece_vocab() == nullptr) {
+    TC3_LOG(ERROR)
+        << "Unable to create tokenizer, model or word_pieces is null.";
+    return nullptr;
+  }
+
+  return std::unique_ptr<BertTokenizer>(new BertTokenizer(
+      reinterpret_cast<const char *>(model->word_piece_vocab()->Data()),
+      model->word_piece_vocab()->size()));
+}
+
+}  // namespace
+
+std::unique_ptr<PodNerAnnotator> PodNerAnnotator::Create(
+    const PodNerModel *model, const UniLib &unilib) {
+  if (model == nullptr) {
+    TC3_LOG(ERROR) << "Create received null model.";
+    return nullptr;
+  }
+
+  std::unique_ptr<BertTokenizer> tokenizer = CreateTokenizer(model);
+  if (tokenizer == nullptr) {
+    return nullptr;
+  }
+
+  int cls_id, sep_id, period_id, unknown_wordpiece_id;
+  if (!FindSpecialWordpieceIds(tokenizer, &cls_id, &sep_id, &period_id,
+                               &unknown_wordpiece_id)) {
+    return nullptr;
+  }
+
+  std::unique_ptr<PodNerAnnotator> annotator(new PodNerAnnotator(unilib));
+  annotator->tokenizer_ = std::move(tokenizer);
+  annotator->lowercase_input_ = model->lowercase_input();
+  annotator->logits_index_in_output_tensor_ =
+      model->logits_index_in_output_tensor();
+  annotator->append_final_period_ = model->append_final_period();
+  if (model->labels() && model->labels()->size() > 0 && model->collections() &&
+      model->collections()->size() > 0) {
+    annotator->labels_.clear();
+    for (const PodNerModel_::Label *label : *model->labels()) {
+      annotator->labels_.emplace_back();
+      annotator->labels_.back().boise_type = label->boise_type();
+      annotator->labels_.back().mention_type = label->mention_type();
+      annotator->labels_.back().collection_id = label->collection_id();
+    }
+    for (const PodNerModel_::Collection *collection : *model->collections()) {
+      annotator->collections_.emplace_back();
+      annotator->collections_.back().name = collection->name()->str();
+      annotator->collections_.back().single_token_priority_score =
+          collection->single_token_priority_score();
+      annotator->collections_.back().multi_token_priority_score =
+          collection->multi_token_priority_score();
+    }
+  } else {
+    FillDefaultLabelsAndCollections(
+        model->priority_score(), &annotator->labels_, &annotator->collections_);
+  }
+  int max_num_surrounding_wordpieces = model->append_final_period() ? 3 : 2;
+  annotator->max_num_effective_wordpieces_ =
+      model->max_num_wordpieces() - max_num_surrounding_wordpieces;
+  annotator->sliding_window_num_wordpieces_overlap_ =
+      model->sliding_window_num_wordpieces_overlap();
+  annotator->max_ratio_unknown_wordpieces_ =
+      model->max_ratio_unknown_wordpieces();
+  annotator->min_number_of_tokens_ = model->min_number_of_tokens();
+  annotator->min_number_of_wordpieces_ = model->min_number_of_wordpieces();
+  annotator->cls_wordpiece_id_ = cls_id;
+  annotator->sep_wordpiece_id_ = sep_id;
+  annotator->period_wordpiece_id_ = period_id;
+  annotator->unknown_wordpiece_id_ = unknown_wordpiece_id;
+  annotator->model_ = model;
+
+  return annotator;
+}
+
+std::vector<LabelT> PodNerAnnotator::ReadResultsFromInterpreter(
+    tflite::Interpreter &interpreter) const {
+  TfLiteTensor *output =
+      interpreter.tensor(interpreter.outputs()[logits_index_in_output_tensor_]);
+  TC3_CHECK_EQ(output->dims->size, 3);
+  TC3_CHECK_EQ(output->dims->data[0], 1);
+  TC3_CHECK_EQ(output->dims->data[2], labels_.size());
+  std::vector<LabelT> return_value(output->dims->data[1]);
+  std::vector<float> probs(output->dims->data[1]);
+  for (int step = 0, index = 0; step < output->dims->data[1]; ++step) {
+    float max_prob = 0.0f;
+    int max_index = 0;
+    for (int cindex = 0; cindex < output->dims->data[2]; ++cindex) {
+      const float probability =
+          ::seq_flow_lite::PodDequantize(*output, index++);
+      if (probability > max_prob) {
+        max_prob = probability;
+        max_index = cindex;
+      }
+    }
+    return_value[step] = labels_[max_index];
+    probs[step] = max_prob;
+  }
+  return return_value;
+}
+
+std::vector<LabelT> PodNerAnnotator::ExecuteModel(
+    const VectorSpan<int> &wordpiece_indices,
+    const VectorSpan<int32_t> &token_starts,
+    const VectorSpan<Token> &tokens) const {
+  // Check that there are not more input indices than supported.
+  if (wordpiece_indices.size() > max_num_effective_wordpieces_) {
+    TC3_LOG(ERROR) << "More than " << max_num_effective_wordpieces_
+                   << " indices passed to POD NER model.";
+    return {};
+  }
+  if (wordpiece_indices.size() <= 0 || token_starts.size() <= 0 ||
+      tokens.size() <= 0) {
+    TC3_LOG(ERROR) << "ExecuteModel received illegal input, #wordpiece_indices="
+                   << wordpiece_indices.size()
+                   << " #token_starts=" << token_starts.size()
+                   << " #tokens=" << tokens.size();
+    return {};
+  }
+
+  // For the CLS (at the beginning) and SEP (at the end) wordpieces.
+  int num_additional_wordpieces = 2;
+  bool should_append_final_period = false;
+  // Optionally add a final period wordpiece if the final token is not
+  // already punctuation. This can improve performance for models trained on
+  // data mostly ending in sentence-final punctuation.
+  const std::string &last_token = (tokens.end() - 1)->value;
+  if (append_final_period_ &&
+      (last_token.size() != 1 || !unilib_.IsPunctuation(last_token.at(0)))) {
+    should_append_final_period = true;
+    num_additional_wordpieces++;
+  }
+
+  // Interpreter needs to be created for each inference call separately,
+  // otherwise the class is not thread-safe.
+  std::unique_ptr<tflite::Interpreter> interpreter = CreateInterpreter(model_);
+  if (interpreter == nullptr) {
+    TC3_LOG(ERROR) << "Couldn't create Interpreter.";
+    return {};
+  }
+
+  TfLiteStatus status;
+  status = interpreter->ResizeInputTensor(
+      interpreter->inputs()[0],
+      {1, wordpiece_indices.size() + num_additional_wordpieces});
+  TC3_CHECK_EQ(status, kTfLiteOk);
+  status = interpreter->ResizeInputTensor(interpreter->inputs()[1],
+                                          {1, token_starts.size()});
+  TC3_CHECK_EQ(status, kTfLiteOk);
+
+  status = interpreter->AllocateTensors();
+  TC3_CHECK_EQ(status, kTfLiteOk);
+
+  TfLiteTensor *tensor = interpreter->tensor(interpreter->inputs()[0]);
+  int wordpiece_tensor_index = 0;
+  tensor->data.i32[wordpiece_tensor_index++] = cls_wordpiece_id_;
+  for (int wordpiece_index : wordpiece_indices) {
+    tensor->data.i32[wordpiece_tensor_index++] = wordpiece_index;
+  }
+
+  if (should_append_final_period) {
+    tensor->data.i32[wordpiece_tensor_index++] = period_wordpiece_id_;
+  }
+  tensor->data.i32[wordpiece_tensor_index++] = sep_wordpiece_id_;
+
+  tensor = interpreter->tensor(interpreter->inputs()[1]);
+  for (int i = 0; i < token_starts.size(); ++i) {
+    // Need to add one because of the starting CLS wordpiece and reduce the
+    // offset from the first wordpiece.
+    tensor->data.i32[i] = token_starts[i] + 1 - token_starts[0];
+  }
+
+  status = interpreter->Invoke();
+  TC3_CHECK_EQ(status, kTfLiteOk);
+
+  return ReadResultsFromInterpreter(*interpreter);
+}
+
+bool PodNerAnnotator::PrepareText(const UnicodeText &text_unicode,
+                                  std::vector<int32_t> *wordpiece_indices,
+                                  std::vector<int32_t> *token_starts,
+                                  std::vector<Token> *tokens) const {
+  *tokens = TokenizeOnWhiteSpacePunctuationAndChineseLetter(
+      text_unicode.ToUTF8String());
+  tokens->erase(std::remove_if(tokens->begin(), tokens->end(),
+                               [](const Token &token) {
+                                 return token.start == token.end;
+                               }),
+                tokens->end());
+
+  for (const Token &token : *tokens) {
+    const std::string token_text =
+        lowercase_input_ ? unilib_
+                               .ToLowerText(UTF8ToUnicodeText(
+                                   token.value, /*do_copy=*/false))
+                               .ToUTF8String()
+                         : token.value;
+
+    const TokenizerResult wordpiece_tokenization =
+        tokenizer_->TokenizeSingleToken(token_text);
+
+    std::vector<int> wordpiece_ids;
+    for (const std::string &wordpiece : wordpiece_tokenization.subwords) {
+      if (!tokenizer_->LookupId(wordpiece, &(wordpiece_ids.emplace_back()))) {
+        TC3_LOG(ERROR) << "Couldn't find wordpiece " << wordpiece;
+        return false;
+      }
+    }
+
+    if (wordpiece_ids.empty()) {
+      TC3_LOG(ERROR) << "wordpiece_ids.empty()";
+      return false;
+    }
+    token_starts->push_back(wordpiece_indices->size());
+    for (const int64 wordpiece_id : wordpiece_ids) {
+      wordpiece_indices->push_back(wordpiece_id);
+    }
+  }
+
+  return true;
+}
+
+bool PodNerAnnotator::Annotate(const UnicodeText &context,
+                               std::vector<AnnotatedSpan> *results) const {
+  return AnnotateAroundSpanOfInterest(context, {0, context.size_codepoints()},
+                                      results);
+}
+
+bool PodNerAnnotator::AnnotateAroundSpanOfInterest(
+    const UnicodeText &context, const CodepointSpan &span_of_interest,
+    std::vector<AnnotatedSpan> *results) const {
+  TC3_CHECK(results != nullptr);
+
+  std::vector<int32_t> wordpiece_indices;
+  std::vector<int32_t> token_starts;
+  std::vector<Token> tokens;
+  if (!PrepareText(context, &wordpiece_indices, &token_starts, &tokens)) {
+    TC3_LOG(ERROR) << "PodNerAnnotator PrepareText(...) failed.";
+    return false;
+  }
+  const int unknown_wordpieces_count =
+      std::count(wordpiece_indices.begin(), wordpiece_indices.end(),
+                 unknown_wordpiece_id_);
+  if (tokens.empty() || tokens.size() < min_number_of_tokens_ ||
+      wordpiece_indices.size() < min_number_of_wordpieces_ ||
+      (static_cast<float>(unknown_wordpieces_count) /
+       wordpiece_indices.size()) > max_ratio_unknown_wordpieces_) {
+    return true;
+  }
+
+  std::vector<LabelT> labels;
+  int first_token_index_entire_window = 0;
+
+  WindowGenerator window_generator(
+      wordpiece_indices, token_starts, tokens, max_num_effective_wordpieces_,
+      sliding_window_num_wordpieces_overlap_, span_of_interest);
+  while (!window_generator.Done()) {
+    VectorSpan<int32_t> cur_wordpiece_indices;
+    VectorSpan<int32_t> cur_token_starts;
+    VectorSpan<Token> cur_tokens;
+    if (!window_generator.Next(&cur_wordpiece_indices, &cur_token_starts,
+                               &cur_tokens) ||
+        cur_tokens.size() <= 0 || cur_token_starts.size() <= 0 ||
+        cur_wordpiece_indices.size() <= 0) {
+      return false;
+    }
+    std::vector<LabelT> new_labels =
+        ExecuteModel(cur_wordpiece_indices, cur_token_starts, cur_tokens);
+    if (labels.empty()) {  // First loop.
+      first_token_index_entire_window = cur_tokens.begin() - tokens.begin();
+    }
+    if (!MergeLabelsIntoLeftSequence(
+            /*labels_right=*/new_labels,
+            /*index_first_right_tag_in_left=*/cur_tokens.begin() -
+                tokens.begin() - first_token_index_entire_window,
+            /*labels_left=*/&labels)) {
+      return false;
+    }
+  }
+
+  if (labels.empty()) {
+    return false;
+  }
+  ConvertTagsToAnnotatedSpans(
+      VectorSpan<Token>(tokens.begin() + first_token_index_entire_window,
+                        tokens.end()),
+      labels, collections_, {PodNerModel_::Label_::MentionType_NAM},
+      /*relaxed_inside_label_matching=*/false,
+      /*relaxed_mention_type_matching=*/false, results);
+
+  return true;
+}
+
+bool PodNerAnnotator::SuggestSelection(const UnicodeText &context,
+                                       CodepointSpan click,
+                                       AnnotatedSpan *result) const {
+  TC3_VLOG(INFO) << "POD NER SuggestSelection " << click;
+  std::vector<AnnotatedSpan> annotations;
+  if (!AnnotateAroundSpanOfInterest(context, click, &annotations)) {
+    TC3_VLOG(INFO) << "POD NER SuggestSelection: Annotate error. Returning: "
+                   << click;
+    *result = {};
+    return false;
+  }
+
+  for (const AnnotatedSpan &annotation : annotations) {
+    TC3_VLOG(INFO) << "POD NER SuggestSelection: " << annotation;
+    if (annotation.span.first <= click.first &&
+        annotation.span.second >= click.second) {
+      TC3_VLOG(INFO) << "POD NER SuggestSelection: Accepted.";
+      *result = annotation;
+      return true;
+    }
+  }
+
+  TC3_VLOG(INFO)
+      << "POD NER SuggestSelection: No annotation matched click. Returning: "
+      << click;
+  *result = {};
+  return false;
+}
+
+bool PodNerAnnotator::ClassifyText(const UnicodeText &context,
+                                   CodepointSpan click,
+                                   ClassificationResult *result) const {
+  TC3_VLOG(INFO) << "POD NER ClassifyText " << click;
+  std::vector<AnnotatedSpan> annotations;
+  if (!AnnotateAroundSpanOfInterest(context, click, &annotations)) {
+    return false;
+  }
+
+  for (const AnnotatedSpan &annotation : annotations) {
+    if (annotation.span.first <= click.first &&
+        annotation.span.second >= click.second) {
+      if (annotation.classification.empty()) {
+        return false;
+      }
+      *result = annotation.classification[0];
+      return true;
+    }
+  }
+  return false;
+}
+
+std::vector<std::string> PodNerAnnotator::GetSupportedCollections() const {
+  std::vector<std::string> result;
+  for (const PodNerModel_::CollectionT &collection : collections_) {
+    result.push_back(collection.name);
+  }
+  return result;
+}
+
+}  // namespace libtextclassifier3
diff --git a/native/annotator/pod_ner/pod-ner-impl.h b/native/annotator/pod_ner/pod-ner-impl.h
new file mode 100644
index 0000000..2dd2a33
--- /dev/null
+++ b/native/annotator/pod_ner/pod-ner-impl.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_POD_NER_IMPL_H_
+#define LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_POD_NER_IMPL_H_
+
+#include <memory>
+
+#include "annotator/model_generated.h"
+#include "annotator/types.h"
+#include "utils/bert_tokenizer.h"
+#include "utils/utf8/unicodetext.h"
+#include "utils/utf8/unilib.h"
+#include "tensorflow/lite/context.h"
+#include "tensorflow/lite/interpreter.h"
+#include "tensorflow/lite/kernels/register.h"
+#include "tensorflow/lite/string_util.h"
+
+namespace libtextclassifier3 {
+
+// Uses POD NER BERT-based model for annotating various types of entities.
+class PodNerAnnotator {
+ public:
+  static std::unique_ptr<PodNerAnnotator> Create(const PodNerModel *model,
+                                                 const UniLib &unilib);
+
+  bool Annotate(const UnicodeText &context,
+                std::vector<AnnotatedSpan> *results) const;
+
+  // Returns true if an entity was detected under 'click', and the selection
+  // indices expanded and assigned to 'result'. Otherwise returns false, and
+  // resets 'result'.
+  bool SuggestSelection(const UnicodeText &context, CodepointSpan click,
+                        AnnotatedSpan *result) const;
+
+  bool ClassifyText(const UnicodeText &context, CodepointSpan click,
+                    ClassificationResult *result) const;
+
+  std::vector<std::string> GetSupportedCollections() const;
+
+ private:
+  explicit PodNerAnnotator(const UniLib &unilib) : unilib_(unilib) {}
+
+  std::vector<PodNerModel_::LabelT> ReadResultsFromInterpreter(
+      tflite::Interpreter &interpreter) const;
+
+  std::vector<PodNerModel_::LabelT> ExecuteModel(
+      const VectorSpan<int> &wordpiece_indices,
+      const VectorSpan<int32_t> &token_starts,
+      const VectorSpan<Token> &tokens) const;
+
+  bool PrepareText(const UnicodeText &text_unicode,
+                   std::vector<int32_t> *wordpiece_indices,
+                   std::vector<int32_t> *token_starts,
+                   std::vector<Token> *tokens) const;
+
+  bool AnnotateAroundSpanOfInterest(const UnicodeText &context,
+                                    const CodepointSpan &span_of_interest,
+                                    std::vector<AnnotatedSpan> *results) const;
+
+  const UniLib &unilib_;
+  bool lowercase_input_;
+  int logits_index_in_output_tensor_;
+  bool append_final_period_;
+  int max_num_effective_wordpieces_;
+  int sliding_window_num_wordpieces_overlap_;
+  float max_ratio_unknown_wordpieces_;
+  int min_number_of_tokens_;
+  int min_number_of_wordpieces_;
+  int cls_wordpiece_id_;
+  int sep_wordpiece_id_;
+  int period_wordpiece_id_;
+  int unknown_wordpiece_id_;
+  std::vector<PodNerModel_::CollectionT> collections_;
+  std::vector<PodNerModel_::LabelT> labels_;
+  std::unique_ptr<BertTokenizer> tokenizer_;
+  const PodNerModel *model_;
+};
+
+}  // namespace libtextclassifier3
+
+#endif  // LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_POD_NER_IMPL_H_
diff --git a/native/annotator/pod_ner/pod-ner-impl_test.cc b/native/annotator/pod_ner/pod-ner-impl_test.cc
new file mode 100644
index 0000000..c7d0bee
--- /dev/null
+++ b/native/annotator/pod_ner/pod-ner-impl_test.cc
@@ -0,0 +1,562 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "annotator/pod_ner/pod-ner-impl.h"
+
+#include <iostream>
+#include <memory>
+#include <thread>  // NOLINT(build/c++11)
+
+#include "annotator/model_generated.h"
+#include "annotator/types.h"
+#include "utils/jvm-test-utils.h"
+#include "utils/test-data-test-utils.h"
+#include "utils/tokenizer-utils.h"
+#include "utils/utf8/unicodetext.h"
+#include "utils/utf8/unilib.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+
+namespace libtextclassifier3 {
+namespace {
+
+using ::testing::IsEmpty;
+using ::testing::Not;
+
+using PodNerModel_::Label_::BoiseType;
+using PodNerModel_::Label_::BoiseType_BEGIN;
+using PodNerModel_::Label_::BoiseType_END;
+using PodNerModel_::Label_::BoiseType_INTERMEDIATE;
+using PodNerModel_::Label_::BoiseType_O;
+using PodNerModel_::Label_::BoiseType_SINGLE;
+using PodNerModel_::Label_::MentionType;
+using PodNerModel_::Label_::MentionType_NAM;
+using PodNerModel_::Label_::MentionType_NOM;
+using PodNerModel_::Label_::MentionType_UNDEFINED;
+
+constexpr int kMinNumberOfTokens = 1;
+constexpr int kMinNumberOfWordpieces = 1;
+constexpr float kDefaultPriorityScore = 0.5;
+
+class PodNerTest : public testing::Test {
+ protected:
+  PodNerTest() {
+    PodNerModelT model;
+
+    model.min_number_of_tokens = kMinNumberOfTokens;
+    model.min_number_of_wordpieces = kMinNumberOfWordpieces;
+    model.priority_score = kDefaultPriorityScore;
+
+    const std::string tflite_model_buffer =
+        GetTestFileContent("annotator/pod_ner/test_data/tflite_model.tflite");
+    model.tflite_model = std::vector<uint8_t>(tflite_model_buffer.begin(),
+                                              tflite_model_buffer.end());
+    const std::string word_piece_vocab_buffer =
+        GetTestFileContent("annotator/pod_ner/test_data/vocab.txt");
+    model.word_piece_vocab = std::vector<uint8_t>(
+        word_piece_vocab_buffer.begin(), word_piece_vocab_buffer.end());
+
+    flatbuffers::FlatBufferBuilder builder;
+    builder.Finish(PodNerModel::Pack(builder, &model));
+
+    model_buffer_ =
+        std::string(reinterpret_cast<const char*>(builder.GetBufferPointer()),
+                    builder.GetSize());
+    model_ = static_cast<const PodNerModel*>(
+        flatbuffers::GetRoot<PodNerModel>(model_buffer_.data()));
+
+    model.append_final_period = true;
+    flatbuffers::FlatBufferBuilder builder_append_final_period;
+    builder_append_final_period.Finish(
+        PodNerModel::Pack(builder_append_final_period, &model));
+
+    model_buffer_append_final_period_ =
+        std::string(reinterpret_cast<const char*>(
+                        builder_append_final_period.GetBufferPointer()),
+                    builder_append_final_period.GetSize());
+    model_append_final_period_ =
+        static_cast<const PodNerModel*>(flatbuffers::GetRoot<PodNerModel>(
+            model_buffer_append_final_period_.data()));
+
+    unilib_ = CreateUniLibForTesting();
+  }
+
+  std::string model_buffer_;
+  const PodNerModel* model_;
+  std::string model_buffer_append_final_period_;
+  const PodNerModel* model_append_final_period_;
+  std::unique_ptr<UniLib> unilib_;
+};
+
+TEST_F(PodNerTest, AnnotateSmokeTest) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  {
+    std::vector<AnnotatedSpan> annotations;
+    ASSERT_TRUE(annotator->Annotate(
+        UTF8ToUnicodeText("Google New York , in New York"), &annotations));
+    EXPECT_THAT(annotations, Not(IsEmpty()));
+  }
+
+  {
+    std::vector<AnnotatedSpan> annotations;
+    ASSERT_TRUE(annotator->Annotate(
+        UTF8ToUnicodeText("Jamie I'm in the first picture and Cameron and Zach "
+                          "are in the second "
+                          "picture."),
+        &annotations));
+    EXPECT_THAT(annotations, Not(IsEmpty()));
+  }
+}
+
+TEST_F(PodNerTest, AnnotateEmptyInput) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  {
+    std::vector<AnnotatedSpan> annotations;
+    ASSERT_TRUE(annotator->Annotate(UTF8ToUnicodeText(""), &annotations));
+    EXPECT_THAT(annotations, IsEmpty());
+  }
+}
+
+void FillCollections(
+    const std::vector<std::string>& collection_names,
+    const std::vector<float>& single_token_priority_scores,
+    const std::vector<float>& multi_token_priority_scores,
+    std::vector<std::unique_ptr<PodNerModel_::CollectionT>>* collections) {
+  ASSERT_TRUE(collection_names.size() == single_token_priority_scores.size() &&
+              collection_names.size() == multi_token_priority_scores.size());
+  collections->clear();
+  for (int i = 0; i < collection_names.size(); ++i) {
+    collections->push_back(std::make_unique<PodNerModel_::CollectionT>());
+    collections->back()->name = collection_names[i];
+    collections->back()->single_token_priority_score =
+        single_token_priority_scores[i];
+    collections->back()->multi_token_priority_score =
+        multi_token_priority_scores[i];
+  }
+}
+
+void EmplaceToLabelVector(
+    BoiseType boise_type, MentionType mention_type, int collection_id,
+    std::vector<std::unique_ptr<PodNerModel_::LabelT>>* labels) {
+  labels->push_back(std::make_unique<PodNerModel_::LabelT>());
+  labels->back()->boise_type = boise_type;
+  labels->back()->mention_type = mention_type;
+  labels->back()->collection_id = collection_id;
+}
+
+void FillLabels(int num_collections,
+                std::vector<std::unique_ptr<PodNerModel_::LabelT>>* labels) {
+  labels->clear();
+  for (auto boise_type :
+       {BoiseType_BEGIN, BoiseType_END, BoiseType_INTERMEDIATE}) {
+    for (auto mention_type : {MentionType_NAM, MentionType_NOM}) {
+      for (int i = 0; i < num_collections - 1; ++i) {  // skip undefined
+        EmplaceToLabelVector(boise_type, mention_type, i, labels);
+      }
+    }
+  }
+  EmplaceToLabelVector(BoiseType_O, MentionType_UNDEFINED, num_collections - 1,
+                       labels);
+  for (auto mention_type : {MentionType_NAM, MentionType_NOM}) {
+    for (int i = 0; i < num_collections - 1; ++i) {  // skip undefined
+      EmplaceToLabelVector(BoiseType_SINGLE, mention_type, i, labels);
+    }
+  }
+}
+
+TEST_F(PodNerTest, AnnotateDefaultCollections) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  std::string multi_word_location = "I live in New York";
+  std::string single_word_location = "I live in Zurich";
+  {
+    std::vector<AnnotatedSpan> annotations;
+    ASSERT_TRUE(annotator->Annotate(UTF8ToUnicodeText(multi_word_location),
+                                    &annotations));
+    EXPECT_THAT(annotations, Not(IsEmpty()));
+    EXPECT_EQ(annotations[0].classification[0].collection, "location");
+    EXPECT_EQ(annotations[0].classification[0].priority_score,
+              kDefaultPriorityScore);
+
+    annotations.clear();
+    ASSERT_TRUE(annotator->Annotate(UTF8ToUnicodeText(single_word_location),
+                                    &annotations));
+    EXPECT_THAT(annotations, Not(IsEmpty()));
+    EXPECT_EQ(annotations[0].classification[0].collection, "location");
+    EXPECT_EQ(annotations[0].classification[0].priority_score,
+              kDefaultPriorityScore);
+  }
+}
+
+TEST_F(PodNerTest, AnnotateConfigurableCollections) {
+  std::unique_ptr<PodNerModelT> unpacked_model(model_->UnPack());
+  ASSERT_TRUE(unpacked_model != nullptr);
+
+  float xxx_single_token_priority = 0.9;
+  float xxx_multi_token_priority = 1.7;
+  const std::vector<std::string> collection_names = {
+      "art",          "consumer_good", "event",  "xxx",
+      "organization", "ner_entity",    "person", "undefined"};
+  FillCollections(collection_names,
+                  /*single_token_priority_scores=*/
+                  {0., 0., 0., xxx_single_token_priority, 0., 0., 0., 0.},
+                  /*multi_token_priority_scores=*/
+                  {0., 0., 0., xxx_multi_token_priority, 0., 0., 0., 0.},
+                  &(unpacked_model->collections));
+  FillLabels(collection_names.size(), &(unpacked_model->labels));
+  flatbuffers::FlatBufferBuilder builder;
+  builder.Finish(PodNerModel::Pack(builder, unpacked_model.get()));
+  std::string model_buffer =
+      std::string(reinterpret_cast<const char*>(builder.GetBufferPointer()),
+                  builder.GetSize());
+  std::unique_ptr<PodNerAnnotator> annotator = PodNerAnnotator::Create(
+      static_cast<const PodNerModel*>(
+          flatbuffers::GetRoot<PodNerModel>(model_buffer.data())),
+      *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  std::string multi_word_location = "I live in New York";
+  std::string single_word_location = "I live in Zurich";
+  {
+    std::vector<AnnotatedSpan> annotations;
+    ASSERT_TRUE(annotator->Annotate(UTF8ToUnicodeText(multi_word_location),
+                                    &annotations));
+    EXPECT_THAT(annotations, Not(IsEmpty()));
+    EXPECT_EQ(annotations[0].classification[0].collection, "xxx");
+    EXPECT_EQ(annotations[0].classification[0].priority_score,
+              xxx_multi_token_priority);
+
+    annotations.clear();
+    ASSERT_TRUE(annotator->Annotate(UTF8ToUnicodeText(single_word_location),
+                                    &annotations));
+    EXPECT_THAT(annotations, Not(IsEmpty()));
+    EXPECT_EQ(annotations[0].classification[0].collection, "xxx");
+    EXPECT_EQ(annotations[0].classification[0].priority_score,
+              xxx_single_token_priority);
+  }
+}
+
+TEST_F(PodNerTest, AnnotateMinNumTokens) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  std::string text = "in New York";
+  {
+    std::vector<AnnotatedSpan> annotations;
+    ASSERT_TRUE(annotator->Annotate(UTF8ToUnicodeText(text), &annotations));
+    EXPECT_THAT(annotations, Not(IsEmpty()));
+  }
+
+  std::unique_ptr<PodNerModelT> unpacked_model(model_->UnPack());
+  ASSERT_TRUE(unpacked_model != nullptr);
+
+  unpacked_model->min_number_of_tokens = 4;
+  flatbuffers::FlatBufferBuilder builder;
+  builder.Finish(PodNerModel::Pack(builder, unpacked_model.get()));
+
+  std::string model_buffer =
+      std::string(reinterpret_cast<const char*>(builder.GetBufferPointer()),
+                  builder.GetSize());
+  annotator = PodNerAnnotator::Create(
+      static_cast<const PodNerModel*>(
+          flatbuffers::GetRoot<PodNerModel>(model_buffer.data())),
+      *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+  {
+    std::vector<AnnotatedSpan> annotations;
+    ASSERT_TRUE(annotator->Annotate(UTF8ToUnicodeText(text), &annotations));
+    EXPECT_THAT(annotations, IsEmpty());
+  }
+}
+
+TEST_F(PodNerTest, AnnotateMinNumWordpieces) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  std::string text = "in New York";
+  {
+    std::vector<AnnotatedSpan> annotations;
+    ASSERT_TRUE(annotator->Annotate(UTF8ToUnicodeText(text), &annotations));
+    EXPECT_THAT(annotations, Not(IsEmpty()));
+  }
+
+  std::unique_ptr<PodNerModelT> unpacked_model(model_->UnPack());
+  ASSERT_TRUE(unpacked_model != nullptr);
+
+  unpacked_model->min_number_of_wordpieces = 10;
+  flatbuffers::FlatBufferBuilder builder;
+  builder.Finish(PodNerModel::Pack(builder, unpacked_model.get()));
+
+  std::string model_buffer =
+      std::string(reinterpret_cast<const char*>(builder.GetBufferPointer()),
+                  builder.GetSize());
+  annotator = PodNerAnnotator::Create(
+      static_cast<const PodNerModel*>(
+          flatbuffers::GetRoot<PodNerModel>(model_buffer.data())),
+      *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+  {
+    std::vector<AnnotatedSpan> annotations;
+    ASSERT_TRUE(annotator->Annotate(UTF8ToUnicodeText(text), &annotations));
+    EXPECT_THAT(annotations, IsEmpty());
+  }
+}
+
+TEST_F(PodNerTest, AnnotateNonstandardText) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  const std::string nonstandard_text =
+      "abcNxCDU1RWNvbXByLXI4NS8xNzcwLzE3NzA4NDY2L3J1Ymluby1raWRzLXJlY2xpbmVyLXd"
+      "pdGgtY3VwLWhvbGRlci5qcGc=/"
+      "UnViaW5vIEtpZHMgUmVjbGluZXIgd2l0aCBDdXAgSG9sZGVyIGJ5IEhhcnJpZXQgQmVl."
+      "html>";
+  std::vector<AnnotatedSpan> annotations;
+  ASSERT_TRUE(
+      annotator->Annotate(UTF8ToUnicodeText(nonstandard_text), &annotations));
+  EXPECT_THAT(annotations, IsEmpty());
+}
+
+TEST_F(PodNerTest, AnnotateTextWithLinefeed) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  std::string nonstandard_text = "My name is Kuba\x09";
+  nonstandard_text += "and this is a test.";
+  std::vector<AnnotatedSpan> annotations;
+  ASSERT_TRUE(
+      annotator->Annotate(UTF8ToUnicodeText(nonstandard_text), &annotations));
+  EXPECT_THAT(annotations, Not(IsEmpty()));
+  EXPECT_EQ(annotations[0].span, CodepointSpan(11, 15));
+
+  nonstandard_text = "My name is Kuba\x09 and this is a test.";
+  ASSERT_TRUE(
+      annotator->Annotate(UTF8ToUnicodeText(nonstandard_text), &annotations));
+  EXPECT_THAT(annotations, Not(IsEmpty()));
+  EXPECT_EQ(annotations[0].span, CodepointSpan(11, 15));
+}
+
+TEST_F(PodNerTest, AnnotateWithUnknownWordpieces) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  const std::string long_text =
+      "It is easy to spend a fun and exciting day in Seattle without a car.  "
+      "There are lots of ways to modify this itinerary. Add a ferry ride "
+      "from the waterfront. Spending the day at the Seattle Center or at the "
+      "aquarium could easily extend this from one to several days. Take the "
+      "Underground Tour in Pioneer Square. Visit the Klondike Gold Rush "
+      "Museum which is fun and free.  In the summer months you can ride the "
+      "passenger-only Water Taxi from the waterfront to West Seattle and "
+      "Alki Beach. Here's a sample one day itinerary: Start at the Space "
+      "Needle by taking the Seattle Monorail from downtown. Look around the "
+      "Seattle Center or go to the Space Needle.";
+  const std::string text_with_unknown_wordpieces = "před chvílí";
+
+  std::vector<AnnotatedSpan> annotations;
+  ASSERT_TRUE(
+      annotator->Annotate(UTF8ToUnicodeText("Google New York , in New York. " +
+                                            text_with_unknown_wordpieces),
+                          &annotations));
+  EXPECT_THAT(annotations, IsEmpty());
+  ASSERT_TRUE(annotator->Annotate(
+      UTF8ToUnicodeText(long_text + " " + text_with_unknown_wordpieces),
+      &annotations));
+  EXPECT_THAT(annotations, Not(IsEmpty()));
+}
+
+class PodNerTestWithOrWithoutFinalPeriod
+    : public PodNerTest,
+      public testing::WithParamInterface<bool> {};
+
+INSTANTIATE_TEST_SUITE_P(TestAnnotateLongText,
+                         PodNerTestWithOrWithoutFinalPeriod,
+                         testing::Values(true, false));
+
+TEST_P(PodNerTestWithOrWithoutFinalPeriod, AnnotateLongText) {
+  std::unique_ptr<PodNerAnnotator> annotator = PodNerAnnotator::Create(
+      GetParam() ? model_append_final_period_ : model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  const std::string long_text =
+      "It is easy to spend a fun and exciting day in Seattle without a car.  "
+      "There are lots of ways to modify this itinerary. Add a ferry ride "
+      "from the waterfront. Spending the day at the Seattle Center or at the "
+      "aquarium could easily extend this from one to several days. Take the "
+      "Underground Tour in Pioneer Square. Visit the Klondike Gold Rush "
+      "Museum which is fun and free.  In the summer months you can ride the "
+      "passenger-only Water Taxi from the waterfront to West Seattle and "
+      "Alki Beach. Here's a sample one day itinerary: Start at the Space "
+      "Needle by taking the Seattle Monorail from downtown. Look around the "
+      "Seattle Center or go to the Space Needle. If you're interested in "
+      "music the EMP-SFM (Experience Music Project - Science Fiction Musuem) "
+      "is located at the foot of the Space Needle.  It has a lot of rock'n "
+      "roll memorabilia that you may find interesting.  The Chihuly Garden "
+      "and Glass musuem is near the Space Needle and you can get a "
+      "combination ticket for both.  It gets really good reviews.  If you're "
+      "interested, then the Bill & Melinda Gates Foundation is across from "
+      "the EMP and has a visitors center that is free.  Come see how Bill "
+      "Gates is giving away his millions. Take the Monorail back downtown.  "
+      "You will be at 5th and Pine (Westlake Center). Head west to the Pike "
+      "Place Market. Look around then head for the Pike Place hill climb "
+      "which is a series of steps that walk down to the waterfront. You will "
+      "end up across the street from the Seattle Aquarium. Plenty of things "
+      "to do on the waterfront, boat cruises, seafood restaurants, the "
+      "Aquarium, or your typical tourist activities. You can walk or take "
+      "the waterfront trolley bus.  Note that waterfront construction has "
+      "relocated the  trolley Metro bus route 99 that will take you from "
+      "Pioneer Square all the way to the end of the waterfront where you can "
+      "visit the Seattle Art Musuem's XXX Sculpture Garden just north of "
+      "Pier 70. The route goes thru Chinatown/International District, "
+      "through Pioneer Square, up 1st ave past the Pike Place Market and to "
+      "1st and Cedar which is walking distance to the Space Needle.  It then "
+      "goes down Broad Street toward the Olympic Sculpture Garden.   It runs "
+      "approximately every 30 minutes during the day and early evening.";
+  std::vector<AnnotatedSpan> annotations;
+  ASSERT_TRUE(annotator->Annotate(UTF8ToUnicodeText(long_text), &annotations));
+  EXPECT_THAT(annotations, Not(IsEmpty()));
+
+  const std::string location_from_beginning = "Seattle";
+  int start_span_location_from_beginning =
+      long_text.find(location_from_beginning);
+  EXPECT_EQ(annotations[0].span,
+            CodepointSpan(start_span_location_from_beginning,
+                          start_span_location_from_beginning +
+                              location_from_beginning.length()));
+
+  const std::string location_from_end = "Olympic Sculpture Garden";
+  int start_span_location_from_end = long_text.find(location_from_end);
+  const AnnotatedSpan& last_annotation = *annotations.rbegin();
+  EXPECT_EQ(
+      last_annotation.span,
+      CodepointSpan(start_span_location_from_end,
+                    start_span_location_from_end + location_from_end.length()));
+}
+
+TEST_F(PodNerTest, SuggestSelectionLongText) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  const std::string long_text =
+      "It is easy to spend a fun and exciting day in Seattle without a car.  "
+      "There are lots of ways to modify this itinerary. Add a ferry ride "
+      "from the waterfront. Spending the day at the Seattle Center or at the "
+      "aquarium could easily extend this from one to several days. Take the "
+      "Underground Tour in Pioneer Square. Visit the Klondike Gold Rush "
+      "Museum which is fun and free.  In the summer months you can ride the "
+      "passenger-only Water Taxi from the waterfront to West Seattle and "
+      "Alki Beach. Here's a sample one day itinerary: Start at the Space "
+      "Needle by taking the Seattle Monorail from downtown. Look around the "
+      "Seattle Center or go to the Space Needle. If you're interested in "
+      "music the EMP-SFM (Experience Music Project - Science Fiction Musuem) "
+      "is located at the foot of the Space Needle.  It has a lot of rock'n "
+      "roll memorabilia that you may find interesting.  The Chihuly Garden "
+      "and Glass musuem is near the Space Needle and you can get a "
+      "combination ticket for both.  It gets really good reviews.  If you're "
+      "interested, then the Bill & Melinda Gates Foundation is across from "
+      "the EMP and has a visitors center that is free.  Come see how Bill "
+      "Gates is giving away his millions. Take the Monorail back downtown.  "
+      "You will be at 5th and Pine (Westlake Center). Head west to the Pike "
+      "Place Market. Look around then head for the Pike Place hill climb "
+      "which is a series of steps that walk down to the waterfront. You will "
+      "end up across the street from the Seattle Aquarium. Plenty of things "
+      "to do on the waterfront, boat cruises, seafood restaurants, the "
+      "Aquarium, or your typical tourist activities. You can walk or take "
+      "the waterfront trolley bus.  Note that waterfront construction has "
+      "relocated the  trolley Metro bus route 99 that will take you from "
+      "Pioneer Square all the way to the end of the waterfront where you can "
+      "visit the Seattle Art Musuem's XXX Sculpture Garden just north of "
+      "Pier 70. The route goes thru Chinatown/International District, "
+      "through Pioneer Square, up 1st ave past the Pike Place Market and to "
+      "1st and Cedar which is walking distance to the Space Needle.  It then "
+      "goes down Broad Street toward the Olympic Sculpture Garden.   It runs "
+      "approximately every 30 minutes during the day and early evening.";
+  const std::string klondike = "Klondike Gold Rush Museum";
+  int klondike_start = long_text.find(klondike);
+
+  AnnotatedSpan suggested_span;
+  EXPECT_TRUE(annotator->SuggestSelection(UTF8ToUnicodeText(long_text),
+                                          {klondike_start, klondike_start + 8},
+                                          &suggested_span));
+  EXPECT_EQ(suggested_span.span,
+            CodepointSpan(klondike_start, klondike_start + klondike.length()));
+}
+
+TEST_F(PodNerTest, SuggestSelectionTest) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  AnnotatedSpan suggested_span;
+  EXPECT_TRUE(annotator->SuggestSelection(
+      UTF8ToUnicodeText("Google New York, in New York"), {7, 10},
+      &suggested_span));
+  EXPECT_EQ(suggested_span.span, CodepointSpan(7, 15));
+  EXPECT_FALSE(annotator->SuggestSelection(
+      UTF8ToUnicodeText("Google New York, in New York"), {17, 19},
+      &suggested_span));
+  EXPECT_EQ(suggested_span.span, CodepointSpan(kInvalidIndex, kInvalidIndex));
+}
+
+TEST_F(PodNerTest, ClassifyTextTest) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  ClassificationResult result;
+  ASSERT_TRUE(annotator->ClassifyText(UTF8ToUnicodeText("We met in New York"),
+                                      {10, 18}, &result));
+  EXPECT_EQ(result.collection, "location");
+}
+
+TEST_F(PodNerTest, ThreadSafety) {
+  std::unique_ptr<PodNerAnnotator> annotator =
+      PodNerAnnotator::Create(model_, *unilib_);
+  ASSERT_TRUE(annotator != nullptr);
+
+  // Do inference in 20 threads. When run with --config=tsan, this should fire
+  // if there's a problem.
+  std::vector<std::thread> thread_pool(20);
+  for (std::thread& thread : thread_pool) {
+    thread = std::thread([&annotator]() {
+      AnnotatedSpan suggested_span;
+      EXPECT_TRUE(annotator->SuggestSelection(
+          UTF8ToUnicodeText("Google New York, in New York"), {7, 10},
+          &suggested_span));
+      EXPECT_EQ(suggested_span.span, CodepointSpan(7, 15));
+    });
+  }
+  for (std::thread& thread : thread_pool) {
+    thread.join();
+  }
+}
+
+}  // namespace
+}  // namespace libtextclassifier3
diff --git a/native/annotator/pod_ner/pod-ner.h b/native/annotator/pod_ner/pod-ner.h
index 3594e6e..812e94e 100644
--- a/native/annotator/pod_ner/pod-ner.h
+++ b/native/annotator/pod_ner/pod-ner.h
@@ -17,6 +17,16 @@
 #ifndef LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_POD_NER_H_
 #define LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_POD_NER_H_
 
+#if defined TC3_POD_NER_ANNOTATOR_FLAG_DEFINED
+#include "annotator/pod_ner/pod-ner-flag-defined.h"
+#else
+#if defined TC3_POD_NER_ANNOTATOR_IMPL
+#include "annotator/pod_ner/pod-ner-impl.h"
+#elif defined TC3_POD_NER_ANNOTATOR_DUMMY
 #include "annotator/pod_ner/pod-ner-dummy.h"
+#else
+#error No POD NER implementation specified.
+#endif
+#endif  // TC3_POD_NER_ANNOTATOR_FLAG_DEFINED
 
 #endif  // LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_POD_NER_H_
diff --git a/native/annotator/pod_ner/test_data/tflite_model.tflite b/native/annotator/pod_ner/test_data/tflite_model.tflite
new file mode 100644
index 0000000..d1286a7
--- /dev/null
+++ b/native/annotator/pod_ner/test_data/tflite_model.tflite
Binary files differ
diff --git a/native/annotator/pod_ner/test_data/vocab.txt b/native/annotator/pod_ner/test_data/vocab.txt
new file mode 100644
index 0000000..fb14027
--- /dev/null
+++ b/native/annotator/pod_ner/test_data/vocab.txt
@@ -0,0 +1,30522 @@
+[PAD]
+[unused0]
+[unused1]
+[unused2]
+[unused3]
+[unused4]
+[unused5]
+[unused6]
+[unused7]
+[unused8]
+[unused9]
+[unused10]
+[unused11]
+[unused12]
+[unused13]
+[unused14]
+[unused15]
+[unused16]
+[unused17]
+[unused18]
+[unused19]
+[unused20]
+[unused21]
+[unused22]
+[unused23]
+[unused24]
+[unused25]
+[unused26]
+[unused27]
+[unused28]
+[unused29]
+[unused30]
+[unused31]
+[unused32]
+[unused33]
+[unused34]
+[unused35]
+[unused36]
+[unused37]
+[unused38]
+[unused39]
+[unused40]
+[unused41]
+[unused42]
+[unused43]
+[unused44]
+[unused45]
+[unused46]
+[unused47]
+[unused48]
+[unused49]
+[unused50]
+[unused51]
+[unused52]
+[unused53]
+[unused54]
+[unused55]
+[unused56]
+[unused57]
+[unused58]
+[unused59]
+[unused60]
+[unused61]
+[unused62]
+[unused63]
+[unused64]
+[unused65]
+[unused66]
+[unused67]
+[unused68]
+[unused69]
+[unused70]
+[unused71]
+[unused72]
+[unused73]
+[unused74]
+[unused75]
+[unused76]
+[unused77]
+[unused78]
+[unused79]
+[unused80]
+[unused81]
+[unused82]
+[unused83]
+[unused84]
+[unused85]
+[unused86]
+[unused87]
+[unused88]
+[unused89]
+[unused90]
+[unused91]
+[unused92]
+[unused93]
+[unused94]
+[unused95]
+[unused96]
+[unused97]
+[unused98]
+[UNK]
+[CLS]
+[SEP]
+[MASK]
+[unused99]
+[unused100]
+[unused101]
+[unused102]
+[unused103]
+[unused104]
+[unused105]
+[unused106]
+[unused107]
+[unused108]
+[unused109]
+[unused110]
+[unused111]
+[unused112]
+[unused113]
+[unused114]
+[unused115]
+[unused116]
+[unused117]
+[unused118]
+[unused119]
+[unused120]
+[unused121]
+[unused122]
+[unused123]
+[unused124]
+[unused125]
+[unused126]
+[unused127]
+[unused128]
+[unused129]
+[unused130]
+[unused131]
+[unused132]
+[unused133]
+[unused134]
+[unused135]
+[unused136]
+[unused137]
+[unused138]
+[unused139]
+[unused140]
+[unused141]
+[unused142]
+[unused143]
+[unused144]
+[unused145]
+[unused146]
+[unused147]
+[unused148]
+[unused149]
+[unused150]
+[unused151]
+[unused152]
+[unused153]
+[unused154]
+[unused155]
+[unused156]
+[unused157]
+[unused158]
+[unused159]
+[unused160]
+[unused161]
+[unused162]
+[unused163]
+[unused164]
+[unused165]
+[unused166]
+[unused167]
+[unused168]
+[unused169]
+[unused170]
+[unused171]
+[unused172]
+[unused173]
+[unused174]
+[unused175]
+[unused176]
+[unused177]
+[unused178]
+[unused179]
+[unused180]
+[unused181]
+[unused182]
+[unused183]
+[unused184]
+[unused185]
+[unused186]
+[unused187]
+[unused188]
+[unused189]
+[unused190]
+[unused191]
+[unused192]
+[unused193]
+[unused194]
+[unused195]
+[unused196]
+[unused197]
+[unused198]
+[unused199]
+[unused200]
+[unused201]
+[unused202]
+[unused203]
+[unused204]
+[unused205]
+[unused206]
+[unused207]
+[unused208]
+[unused209]
+[unused210]
+[unused211]
+[unused212]
+[unused213]
+[unused214]
+[unused215]
+[unused216]
+[unused217]
+[unused218]
+[unused219]
+[unused220]
+[unused221]
+[unused222]
+[unused223]
+[unused224]
+[unused225]
+[unused226]
+[unused227]
+[unused228]
+[unused229]
+[unused230]
+[unused231]
+[unused232]
+[unused233]
+[unused234]
+[unused235]
+[unused236]
+[unused237]
+[unused238]
+[unused239]
+[unused240]
+[unused241]
+[unused242]
+[unused243]
+[unused244]
+[unused245]
+[unused246]
+[unused247]
+[unused248]
+[unused249]
+[unused250]
+[unused251]
+[unused252]
+[unused253]
+[unused254]
+[unused255]
+[unused256]
+[unused257]
+[unused258]
+[unused259]
+[unused260]
+[unused261]
+[unused262]
+[unused263]
+[unused264]
+[unused265]
+[unused266]
+[unused267]
+[unused268]
+[unused269]
+[unused270]
+[unused271]
+[unused272]
+[unused273]
+[unused274]
+[unused275]
+[unused276]
+[unused277]
+[unused278]
+[unused279]
+[unused280]
+[unused281]
+[unused282]
+[unused283]
+[unused284]
+[unused285]
+[unused286]
+[unused287]
+[unused288]
+[unused289]
+[unused290]
+[unused291]
+[unused292]
+[unused293]
+[unused294]
+[unused295]
+[unused296]
+[unused297]
+[unused298]
+[unused299]
+[unused300]
+[unused301]
+[unused302]
+[unused303]
+[unused304]
+[unused305]
+[unused306]
+[unused307]
+[unused308]
+[unused309]
+[unused310]
+[unused311]
+[unused312]
+[unused313]
+[unused314]
+[unused315]
+[unused316]
+[unused317]
+[unused318]
+[unused319]
+[unused320]
+[unused321]
+[unused322]
+[unused323]
+[unused324]
+[unused325]
+[unused326]
+[unused327]
+[unused328]
+[unused329]
+[unused330]
+[unused331]
+[unused332]
+[unused333]
+[unused334]
+[unused335]
+[unused336]
+[unused337]
+[unused338]
+[unused339]
+[unused340]
+[unused341]
+[unused342]
+[unused343]
+[unused344]
+[unused345]
+[unused346]
+[unused347]
+[unused348]
+[unused349]
+[unused350]
+[unused351]
+[unused352]
+[unused353]
+[unused354]
+[unused355]
+[unused356]
+[unused357]
+[unused358]
+[unused359]
+[unused360]
+[unused361]
+[unused362]
+[unused363]
+[unused364]
+[unused365]
+[unused366]
+[unused367]
+[unused368]
+[unused369]
+[unused370]
+[unused371]
+[unused372]
+[unused373]
+[unused374]
+[unused375]
+[unused376]
+[unused377]
+[unused378]
+[unused379]
+[unused380]
+[unused381]
+[unused382]
+[unused383]
+[unused384]
+[unused385]
+[unused386]
+[unused387]
+[unused388]
+[unused389]
+[unused390]
+[unused391]
+[unused392]
+[unused393]
+[unused394]
+[unused395]
+[unused396]
+[unused397]
+[unused398]
+[unused399]
+[unused400]
+[unused401]
+[unused402]
+[unused403]
+[unused404]
+[unused405]
+[unused406]
+[unused407]
+[unused408]
+[unused409]
+[unused410]
+[unused411]
+[unused412]
+[unused413]
+[unused414]
+[unused415]
+[unused416]
+[unused417]
+[unused418]
+[unused419]
+[unused420]
+[unused421]
+[unused422]
+[unused423]
+[unused424]
+[unused425]
+[unused426]
+[unused427]
+[unused428]
+[unused429]
+[unused430]
+[unused431]
+[unused432]
+[unused433]
+[unused434]
+[unused435]
+[unused436]
+[unused437]
+[unused438]
+[unused439]
+[unused440]
+[unused441]
+[unused442]
+[unused443]
+[unused444]
+[unused445]
+[unused446]
+[unused447]
+[unused448]
+[unused449]
+[unused450]
+[unused451]
+[unused452]
+[unused453]
+[unused454]
+[unused455]
+[unused456]
+[unused457]
+[unused458]
+[unused459]
+[unused460]
+[unused461]
+[unused462]
+[unused463]
+[unused464]
+[unused465]
+[unused466]
+[unused467]
+[unused468]
+[unused469]
+[unused470]
+[unused471]
+[unused472]
+[unused473]
+[unused474]
+[unused475]
+[unused476]
+[unused477]
+[unused478]
+[unused479]
+[unused480]
+[unused481]
+[unused482]
+[unused483]
+[unused484]
+[unused485]
+[unused486]
+[unused487]
+[unused488]
+[unused489]
+[unused490]
+[unused491]
+[unused492]
+[unused493]
+[unused494]
+[unused495]
+[unused496]
+[unused497]
+[unused498]
+[unused499]
+[unused500]
+[unused501]
+[unused502]
+[unused503]
+[unused504]
+[unused505]
+[unused506]
+[unused507]
+[unused508]
+[unused509]
+[unused510]
+[unused511]
+[unused512]
+[unused513]
+[unused514]
+[unused515]
+[unused516]
+[unused517]
+[unused518]
+[unused519]
+[unused520]
+[unused521]
+[unused522]
+[unused523]
+[unused524]
+[unused525]
+[unused526]
+[unused527]
+[unused528]
+[unused529]
+[unused530]
+[unused531]
+[unused532]
+[unused533]
+[unused534]
+[unused535]
+[unused536]
+[unused537]
+[unused538]
+[unused539]
+[unused540]
+[unused541]
+[unused542]
+[unused543]
+[unused544]
+[unused545]
+[unused546]
+[unused547]
+[unused548]
+[unused549]
+[unused550]
+[unused551]
+[unused552]
+[unused553]
+[unused554]
+[unused555]
+[unused556]
+[unused557]
+[unused558]
+[unused559]
+[unused560]
+[unused561]
+[unused562]
+[unused563]
+[unused564]
+[unused565]
+[unused566]
+[unused567]
+[unused568]
+[unused569]
+[unused570]
+[unused571]
+[unused572]
+[unused573]
+[unused574]
+[unused575]
+[unused576]
+[unused577]
+[unused578]
+[unused579]
+[unused580]
+[unused581]
+[unused582]
+[unused583]
+[unused584]
+[unused585]
+[unused586]
+[unused587]
+[unused588]
+[unused589]
+[unused590]
+[unused591]
+[unused592]
+[unused593]
+[unused594]
+[unused595]
+[unused596]
+[unused597]
+[unused598]
+[unused599]
+[unused600]
+[unused601]
+[unused602]
+[unused603]
+[unused604]
+[unused605]
+[unused606]
+[unused607]
+[unused608]
+[unused609]
+[unused610]
+[unused611]
+[unused612]
+[unused613]
+[unused614]
+[unused615]
+[unused616]
+[unused617]
+[unused618]
+[unused619]
+[unused620]
+[unused621]
+[unused622]
+[unused623]
+[unused624]
+[unused625]
+[unused626]
+[unused627]
+[unused628]
+[unused629]
+[unused630]
+[unused631]
+[unused632]
+[unused633]
+[unused634]
+[unused635]
+[unused636]
+[unused637]
+[unused638]
+[unused639]
+[unused640]
+[unused641]
+[unused642]
+[unused643]
+[unused644]
+[unused645]
+[unused646]
+[unused647]
+[unused648]
+[unused649]
+[unused650]
+[unused651]
+[unused652]
+[unused653]
+[unused654]
+[unused655]
+[unused656]
+[unused657]
+[unused658]
+[unused659]
+[unused660]
+[unused661]
+[unused662]
+[unused663]
+[unused664]
+[unused665]
+[unused666]
+[unused667]
+[unused668]
+[unused669]
+[unused670]
+[unused671]
+[unused672]
+[unused673]
+[unused674]
+[unused675]
+[unused676]
+[unused677]
+[unused678]
+[unused679]
+[unused680]
+[unused681]
+[unused682]
+[unused683]
+[unused684]
+[unused685]
+[unused686]
+[unused687]
+[unused688]
+[unused689]
+[unused690]
+[unused691]
+[unused692]
+[unused693]
+[unused694]
+[unused695]
+[unused696]
+[unused697]
+[unused698]
+[unused699]
+[unused700]
+[unused701]
+[unused702]
+[unused703]
+[unused704]
+[unused705]
+[unused706]
+[unused707]
+[unused708]
+[unused709]
+[unused710]
+[unused711]
+[unused712]
+[unused713]
+[unused714]
+[unused715]
+[unused716]
+[unused717]
+[unused718]
+[unused719]
+[unused720]
+[unused721]
+[unused722]
+[unused723]
+[unused724]
+[unused725]
+[unused726]
+[unused727]
+[unused728]
+[unused729]
+[unused730]
+[unused731]
+[unused732]
+[unused733]
+[unused734]
+[unused735]
+[unused736]
+[unused737]
+[unused738]
+[unused739]
+[unused740]
+[unused741]
+[unused742]
+[unused743]
+[unused744]
+[unused745]
+[unused746]
+[unused747]
+[unused748]
+[unused749]
+[unused750]
+[unused751]
+[unused752]
+[unused753]
+[unused754]
+[unused755]
+[unused756]
+[unused757]
+[unused758]
+[unused759]
+[unused760]
+[unused761]
+[unused762]
+[unused763]
+[unused764]
+[unused765]
+[unused766]
+[unused767]
+[unused768]
+[unused769]
+[unused770]
+[unused771]
+[unused772]
+[unused773]
+[unused774]
+[unused775]
+[unused776]
+[unused777]
+[unused778]
+[unused779]
+[unused780]
+[unused781]
+[unused782]
+[unused783]
+[unused784]
+[unused785]
+[unused786]
+[unused787]
+[unused788]
+[unused789]
+[unused790]
+[unused791]
+[unused792]
+[unused793]
+[unused794]
+[unused795]
+[unused796]
+[unused797]
+[unused798]
+[unused799]
+[unused800]
+[unused801]
+[unused802]
+[unused803]
+[unused804]
+[unused805]
+[unused806]
+[unused807]
+[unused808]
+[unused809]
+[unused810]
+[unused811]
+[unused812]
+[unused813]
+[unused814]
+[unused815]
+[unused816]
+[unused817]
+[unused818]
+[unused819]
+[unused820]
+[unused821]
+[unused822]
+[unused823]
+[unused824]
+[unused825]
+[unused826]
+[unused827]
+[unused828]
+[unused829]
+[unused830]
+[unused831]
+[unused832]
+[unused833]
+[unused834]
+[unused835]
+[unused836]
+[unused837]
+[unused838]
+[unused839]
+[unused840]
+[unused841]
+[unused842]
+[unused843]
+[unused844]
+[unused845]
+[unused846]
+[unused847]
+[unused848]
+[unused849]
+[unused850]
+[unused851]
+[unused852]
+[unused853]
+[unused854]
+[unused855]
+[unused856]
+[unused857]
+[unused858]
+[unused859]
+[unused860]
+[unused861]
+[unused862]
+[unused863]
+[unused864]
+[unused865]
+[unused866]
+[unused867]
+[unused868]
+[unused869]
+[unused870]
+[unused871]
+[unused872]
+[unused873]
+[unused874]
+[unused875]
+[unused876]
+[unused877]
+[unused878]
+[unused879]
+[unused880]
+[unused881]
+[unused882]
+[unused883]
+[unused884]
+[unused885]
+[unused886]
+[unused887]
+[unused888]
+[unused889]
+[unused890]
+[unused891]
+[unused892]
+[unused893]
+[unused894]
+[unused895]
+[unused896]
+[unused897]
+[unused898]
+[unused899]
+[unused900]
+[unused901]
+[unused902]
+[unused903]
+[unused904]
+[unused905]
+[unused906]
+[unused907]
+[unused908]
+[unused909]
+[unused910]
+[unused911]
+[unused912]
+[unused913]
+[unused914]
+[unused915]
+[unused916]
+[unused917]
+[unused918]
+[unused919]
+[unused920]
+[unused921]
+[unused922]
+[unused923]
+[unused924]
+[unused925]
+[unused926]
+[unused927]
+[unused928]
+[unused929]
+[unused930]
+[unused931]
+[unused932]
+[unused933]
+[unused934]
+[unused935]
+[unused936]
+[unused937]
+[unused938]
+[unused939]
+[unused940]
+[unused941]
+[unused942]
+[unused943]
+[unused944]
+[unused945]
+[unused946]
+[unused947]
+[unused948]
+[unused949]
+[unused950]
+[unused951]
+[unused952]
+[unused953]
+[unused954]
+[unused955]
+[unused956]
+[unused957]
+[unused958]
+[unused959]
+[unused960]
+[unused961]
+[unused962]
+[unused963]
+[unused964]
+[unused965]
+[unused966]
+[unused967]
+[unused968]
+[unused969]
+[unused970]
+[unused971]
+[unused972]
+[unused973]
+[unused974]
+[unused975]
+[unused976]
+[unused977]
+[unused978]
+[unused979]
+[unused980]
+[unused981]
+[unused982]
+[unused983]
+[unused984]
+[unused985]
+[unused986]
+[unused987]
+[unused988]
+[unused989]
+[unused990]
+[unused991]
+[unused992]
+[unused993]
+!
+"
+#
+$
+%
+&
+'
+(
+)
+*
++
+,
+-
+.
+/
+0
+1
+2
+3
+4
+5
+6
+7
+8
+9
+:
+;
+<
+=
+>
+?
+@
+[
+\
+]
+^
+_
+`
+a
+b
+c
+d
+e
+f
+g
+h
+i
+j
+k
+l
+m
+n
+o
+p
+q
+r
+s
+t
+u
+v
+w
+x
+y
+z
+{
+|
+}
+~



























+¿





















































+ʿ




















+ο
























+п









































+տ










































































+अ
+आ
+उ
+ए
+क
+ख
+ग
+च
+ज
+ट
+ड
+ण
+त
+थ
+द
+ध
+न
+प
+ब
+भ
+म
+य
+र
+ल
+व
+श
+ष
+स
+ह
+ा
+ि
+ी
+ो
+।
+॥
+ং
+অ
+আ
+ই
+উ
+এ
+ও
+ক
+খ
+গ
+চ
+ছ
+জ
+ট
+ড
+ণ
+ত
+থ
+দ
+ধ
+ন
+প
+ব
+ভ
+ম
+য
+র
+ল
+শ
+ষ
+স
+হ
+া
+ি
+ী
+ে
+க
+ச
+ட
+த
+ந
+ன
+ப
+ம
+ய
+ர
+ல
+ள
+வ
+ா
+ி
+ு
+ே
+ை
+ನ
+ರ
+ಾ
+ක
+ය
+ර
+ල
+ව
+ා
+ก
+ง
+ต
+ท
+น
+พ
+ม
+ย
+ร
+ล
+ว
+ส
+อ
+า
+เ
+་
+།
+ག
+ང
+ད
+ན
+པ
+བ
+མ
+འ
+ར
+ལ
+ས
+မ
+ა
+ბ
+გ
+დ
+ე
+ვ
+თ
+ი
+კ
+ლ
+მ
+ნ
+ო
+რ
+ს
+ტ
+უ
+ᄀ
+ᄂ
+ᄃ
+ᄅ
+ᄆ
+ᄇ
+ᄉ
+ᄊ
+ᄋ
+ᄌ
+ᄎ
+ᄏ
+ᄐ
+ᄑ
+ᄒ
+ᅡ
+ᅢ
+ᅥ
+ᅦ
+ᅧ
+ᅩ
+ᅪ
+ᅭ
+ᅮ
+ᅯ
+ᅲ
+ᅳ
+ᅴ
+ᅵ
+ᆨ
+ᆫ
+ᆯ
+ᆷ
+ᆸ
+ᆼ
+ᴬ
+ᴮ
+ᴰ
+ᴵ
+ᴺ
+ᵀ
+ᵃ
+ᵇ
+ᵈ
+ᵉ
+ᵍ
+ᵏ
+ᵐ
+ᵒ
+ᵖ
+ᵗ
+ᵘ
+ᵢ
+ᵣ
+ᵤ
+ᵥ
+ᶜ
+ᶠ
+‐
+‑
+‒
+–
+—
+―
+‖
+‘
+’
+‚
+“
+”
+„
+†
+‡
+•
+…
+‰
+′
+″
+›
+‿
+⁄
+⁰
+ⁱ
+⁴
+⁵
+⁶
+⁷
+⁸
+⁹
+⁺
+⁻
+ⁿ
+₀
+₁
+₂
+₃
+₄
+₅
+₆
+₇
+₈
+₉
+₊
+₍
+₎
+ₐ
+ₑ
+ₒ
+ₓ
+ₕ
+ₖ
+ₗ
+ₘ
+ₙ
+ₚ
+ₛ
+ₜ
+₤
+₩
+€
+₱
+₹
+ℓ
+№
+ℝ
+™
+⅓
+⅔
+←
+↑
+→
+↓
+↔
+↦
+⇄
+⇌
+⇒
+∂
+∅
+∆
+∇
+∈
+−
+∗
+∘
+√
+∞
+∧
+∨
+∩
+∪
+≈
+≡
+≤
+≥
+⊂
+⊆
+⊕
+⊗
+⋅
+─
+│
+■
+▪
+●
+★
+☆
+☉
+♠
+♣
+♥
+♦
+♭
+♯
+⟨
+⟩
+ⱼ
+⺩
+⺼
+⽥
+、
+。
+〈
+〉
+《
+》
+「
+」
+『
+』
+〜
+あ
+い
+う
+え
+お
+か
+き
+く
+け
+こ
+さ
+し
+す
+せ
+そ
+た
+ち
+っ
+つ
+て
+と
+な
+に
+ぬ
+ね
+の
+は
+ひ
+ふ
+へ
+ほ
+ま
+み
+む
+め
+も
+や
+ゆ
+よ
+ら
+り
+る
+れ
+ろ
+を
+ん
+ァ
+ア
+ィ
+イ
+ウ
+ェ
+エ
+オ
+カ
+キ
+ク
+ケ
+コ
+サ
+シ
+ス
+セ
+タ
+チ
+ッ
+ツ
+テ
+ト
+ナ
+ニ
+ノ
+ハ
+ヒ
+フ
+ヘ
+ホ
+マ
+ミ
+ム
+メ
+モ
+ャ
+ュ
+ョ
+ラ
+リ
+ル
+レ
+ロ
+ワ
+ン
+・
+ー
+一
+三
+上
+下
+不
+世
+中
+主
+久
+之
+也
+事
+二
+五
+井
+京
+人
+亻
+仁
+介
+代
+仮
+伊
+会
+佐
+侍
+保
+信
+健
+元
+光
+八
+公
+内
+出
+分
+前
+劉
+力
+加
+勝
+北
+区
+十
+千
+南
+博
+原
+口
+古
+史
+司
+合
+吉
+同
+名
+和
+囗
+四
+国
+國
+土
+地
+坂
+城
+堂
+場
+士
+夏
+外
+大
+天
+太
+夫
+奈
+女
+子
+学
+宀
+宇
+安
+宗
+定
+宣
+宮
+家
+宿
+寺
+將
+小
+尚
+山
+岡
+島
+崎
+川
+州
+巿
+帝
+平
+年
+幸
+广
+弘
+張
+彳
+後
+御
+德
+心
+忄
+志
+忠
+愛
+成
+我
+戦
+戸
+手
+扌
+政
+文
+新
+方
+日
+明
+星
+春
+昭
+智
+曲
+書
+月
+有
+朝
+木
+本
+李
+村
+東
+松
+林
+森
+楊
+樹
+橋
+歌
+止
+正
+武
+比
+氏
+民
+水
+氵
+氷
+永
+江
+沢
+河
+治
+法
+海
+清
+漢
+瀬
+火
+版
+犬
+王
+生
+田
+男
+疒
+発
+白
+的
+皇
+目
+相
+省
+真
+石
+示
+社
+神
+福
+禾
+秀
+秋
+空
+立
+章
+竹
+糹
+美
+義
+耳
+良
+艹
+花
+英
+華
+葉
+藤
+行
+街
+西
+見
+訁
+語
+谷
+貝
+貴
+車
+軍
+辶
+道
+郎
+郡
+部
+都
+里
+野
+金
+鈴
+镇
+長
+門
+間
+阝
+阿
+陳
+陽
+雄
+青
+面
+風
+食
+香
+馬
+高
+龍
+龸
+fi
+fl
+!
+(
+)
+,
+-
+.
+/
+:
+?
+~
+the
+of
+and
+in
+to
+was
+he
+is
+as
+for
+on
+with
+that
+it
+his
+by
+at
+from
+her
+##s
+she
+you
+had
+an
+were
+but
+be
+this
+are
+not
+my
+they
+one
+which
+or
+have
+him
+me
+first
+all
+also
+their
+has
+up
+who
+out
+been
+when
+after
+there
+into
+new
+two
+its
+##a
+time
+would
+no
+what
+about
+said
+we
+over
+then
+other
+so
+more
+##e
+can
+if
+like
+back
+them
+only
+some
+could
+##i
+where
+just
+##ing
+during
+before
+##n
+do
+##o
+made
+school
+through
+than
+now
+years
+most
+world
+may
+between
+down
+well
+three
+##d
+year
+while
+will
+##ed
+##r
+##y
+later
+##t
+city
+under
+around
+did
+such
+being
+used
+state
+people
+part
+know
+against
+your
+many
+second
+university
+both
+national
+##er
+these
+don
+known
+off
+way
+until
+re
+how
+even
+get
+head
+...
+didn
+##ly
+team
+american
+because
+de
+##l
+born
+united
+film
+since
+still
+long
+work
+south
+us
+became
+any
+high
+again
+day
+family
+see
+right
+man
+eyes
+house
+season
+war
+states
+including
+took
+life
+north
+same
+each
+called
+name
+much
+place
+however
+go
+four
+group
+another
+found
+won
+area
+here
+going
+10
+away
+series
+left
+home
+music
+best
+make
+hand
+number
+company
+several
+never
+last
+john
+000
+very
+album
+take
+end
+good
+too
+following
+released
+game
+played
+little
+began
+district
+##m
+old
+want
+those
+side
+held
+own
+early
+county
+ll
+league
+use
+west
+##u
+face
+think
+##es
+2010
+government
+##h
+march
+came
+small
+general
+town
+june
+##on
+line
+based
+something
+##k
+september
+thought
+looked
+along
+international
+2011
+air
+july
+club
+went
+january
+october
+our
+august
+april
+york
+12
+few
+2012
+2008
+east
+show
+member
+college
+2009
+father
+public
+##us
+come
+men
+five
+set
+station
+church
+##c
+next
+former
+november
+room
+party
+located
+december
+2013
+age
+got
+2007
+##g
+system
+let
+love
+2006
+though
+every
+2014
+look
+song
+water
+century
+without
+body
+black
+night
+within
+great
+women
+single
+ve
+building
+large
+population
+river
+named
+band
+white
+started
+##an
+once
+15
+20
+should
+18
+2015
+service
+top
+built
+british
+open
+death
+king
+moved
+local
+times
+children
+february
+book
+why
+11
+door
+need
+president
+order
+final
+road
+wasn
+although
+due
+major
+died
+village
+third
+knew
+2016
+asked
+turned
+st
+wanted
+say
+##p
+together
+received
+main
+son
+served
+different
+##en
+behind
+himself
+felt
+members
+power
+football
+law
+voice
+play
+##in
+near
+park
+history
+30
+having
+2005
+16
+##man
+saw
+mother
+##al
+army
+point
+front
+help
+english
+street
+art
+late
+hands
+games
+award
+##ia
+young
+14
+put
+published
+country
+division
+across
+told
+13
+often
+ever
+french
+london
+center
+six
+red
+2017
+led
+days
+include
+light
+25
+find
+tell
+among
+species
+really
+according
+central
+half
+2004
+form
+original
+gave
+office
+making
+enough
+lost
+full
+opened
+must
+included
+live
+given
+german
+player
+run
+business
+woman
+community
+cup
+might
+million
+land
+2000
+court
+development
+17
+short
+round
+ii
+km
+seen
+class
+story
+always
+become
+sure
+research
+almost
+director
+council
+la
+##2
+career
+things
+using
+island
+##z
+couldn
+car
+##is
+24
+close
+force
+##1
+better
+free
+support
+control
+field
+students
+2003
+education
+married
+##b
+nothing
+worked
+others
+record
+big
+inside
+level
+anything
+continued
+give
+james
+##3
+military
+established
+non
+returned
+feel
+does
+title
+written
+thing
+feet
+william
+far
+co
+association
+hard
+already
+2002
+##ra
+championship
+human
+western
+100
+##na
+department
+hall
+role
+various
+production
+21
+19
+heart
+2001
+living
+fire
+version
+##ers
+##f
+television
+royal
+##4
+produced
+working
+act
+case
+society
+region
+present
+radio
+period
+looking
+least
+total
+keep
+england
+wife
+program
+per
+brother
+mind
+special
+22
+##le
+am
+works
+soon
+##6
+political
+george
+services
+taken
+created
+##7
+further
+able
+reached
+david
+union
+joined
+upon
+done
+important
+social
+information
+either
+##ic
+##x
+appeared
+position
+ground
+lead
+rock
+dark
+election
+23
+board
+france
+hair
+course
+arms
+site
+police
+girl
+instead
+real
+sound
+##v
+words
+moment
+##te
+someone
+##8
+summer
+project
+announced
+san
+less
+wrote
+past
+followed
+##5
+blue
+founded
+al
+finally
+india
+taking
+records
+america
+##ne
+1999
+design
+considered
+northern
+god
+stop
+battle
+toward
+european
+outside
+described
+track
+today
+playing
+language
+28
+call
+26
+heard
+professional
+low
+australia
+miles
+california
+win
+yet
+green
+##ie
+trying
+blood
+##ton
+southern
+science
+maybe
+everything
+match
+square
+27
+mouth
+video
+race
+recorded
+leave
+above
+##9
+daughter
+points
+space
+1998
+museum
+change
+middle
+common
+##0
+move
+tv
+post
+##ta
+lake
+seven
+tried
+elected
+closed
+ten
+paul
+minister
+##th
+months
+start
+chief
+return
+canada
+person
+sea
+release
+similar
+modern
+brought
+rest
+hit
+formed
+mr
+##la
+1997
+floor
+event
+doing
+thomas
+1996
+robert
+care
+killed
+training
+star
+week
+needed
+turn
+finished
+railway
+rather
+news
+health
+sent
+example
+ran
+term
+michael
+coming
+currently
+yes
+forces
+despite
+gold
+areas
+50
+stage
+fact
+29
+dead
+says
+popular
+2018
+originally
+germany
+probably
+developed
+result
+pulled
+friend
+stood
+money
+running
+mi
+signed
+word
+songs
+child
+eventually
+met
+tour
+average
+teams
+minutes
+festival
+current
+deep
+kind
+1995
+decided
+usually
+eastern
+seemed
+##ness
+episode
+bed
+added
+table
+indian
+private
+charles
+route
+available
+idea
+throughout
+centre
+addition
+appointed
+style
+1994
+books
+eight
+construction
+press
+mean
+wall
+friends
+remained
+schools
+study
+##ch
+##um
+institute
+oh
+chinese
+sometimes
+events
+possible
+1992
+australian
+type
+brown
+forward
+talk
+process
+food
+debut
+seat
+performance
+committee
+features
+character
+arts
+herself
+else
+lot
+strong
+russian
+range
+hours
+peter
+arm
+##da
+morning
+dr
+sold
+##ry
+quickly
+directed
+1993
+guitar
+china
+##w
+31
+list
+##ma
+performed
+media
+uk
+players
+smile
+##rs
+myself
+40
+placed
+coach
+province
+towards
+wouldn
+leading
+whole
+boy
+official
+designed
+grand
+census
+##el
+europe
+attack
+japanese
+henry
+1991
+##re
+##os
+cross
+getting
+alone
+action
+lower
+network
+wide
+washington
+japan
+1990
+hospital
+believe
+changed
+sister
+##ar
+hold
+gone
+sir
+hadn
+ship
+##ka
+studies
+academy
+shot
+rights
+below
+base
+bad
+involved
+kept
+largest
+##ist
+bank
+future
+especially
+beginning
+mark
+movement
+section
+female
+magazine
+plan
+professor
+lord
+longer
+##ian
+sat
+walked
+hill
+actually
+civil
+energy
+model
+families
+size
+thus
+aircraft
+completed
+includes
+data
+captain
+##or
+fight
+vocals
+featured
+richard
+bridge
+fourth
+1989
+officer
+stone
+hear
+##ism
+means
+medical
+groups
+management
+self
+lips
+competition
+entire
+lived
+technology
+leaving
+federal
+tournament
+bit
+passed
+hot
+independent
+awards
+kingdom
+mary
+spent
+fine
+doesn
+reported
+##ling
+jack
+fall
+raised
+itself
+stay
+true
+studio
+1988
+sports
+replaced
+paris
+systems
+saint
+leader
+theatre
+whose
+market
+capital
+parents
+spanish
+canadian
+earth
+##ity
+cut
+degree
+writing
+bay
+christian
+awarded
+natural
+higher
+bill
+##as
+coast
+provided
+previous
+senior
+ft
+valley
+organization
+stopped
+onto
+countries
+parts
+conference
+queen
+security
+interest
+saying
+allowed
+master
+earlier
+phone
+matter
+smith
+winning
+try
+happened
+moving
+campaign
+los
+##ley
+breath
+nearly
+mid
+1987
+certain
+girls
+date
+italian
+african
+standing
+fell
+artist
+##ted
+shows
+deal
+mine
+industry
+1986
+##ng
+everyone
+republic
+provide
+collection
+library
+student
+##ville
+primary
+owned
+older
+via
+heavy
+1st
+makes
+##able
+attention
+anyone
+africa
+##ri
+stated
+length
+ended
+fingers
+command
+staff
+skin
+foreign
+opening
+governor
+okay
+medal
+kill
+sun
+cover
+job
+1985
+introduced
+chest
+hell
+feeling
+##ies
+success
+meet
+reason
+standard
+meeting
+novel
+1984
+trade
+source
+buildings
+##land
+rose
+guy
+goal
+##ur
+chapter
+native
+husband
+previously
+unit
+limited
+entered
+weeks
+producer
+operations
+mountain
+takes
+covered
+forced
+related
+roman
+complete
+successful
+key
+texas
+cold
+##ya
+channel
+1980
+traditional
+films
+dance
+clear
+approximately
+500
+nine
+van
+prince
+question
+active
+tracks
+ireland
+regional
+silver
+author
+personal
+sense
+operation
+##ine
+economic
+1983
+holding
+twenty
+isbn
+additional
+speed
+hour
+edition
+regular
+historic
+places
+whom
+shook
+movie
+km²
+secretary
+prior
+report
+chicago
+read
+foundation
+view
+engine
+scored
+1982
+units
+ask
+airport
+property
+ready
+immediately
+lady
+month
+listed
+contract
+##de
+manager
+themselves
+lines
+##ki
+navy
+writer
+meant
+##ts
+runs
+##ro
+practice
+championships
+singer
+glass
+commission
+required
+forest
+starting
+culture
+generally
+giving
+access
+attended
+test
+couple
+stand
+catholic
+martin
+caught
+executive
+##less
+eye
+##ey
+thinking
+chair
+quite
+shoulder
+1979
+hope
+decision
+plays
+defeated
+municipality
+whether
+structure
+offered
+slowly
+pain
+ice
+direction
+##ion
+paper
+mission
+1981
+mostly
+200
+noted
+individual
+managed
+nature
+lives
+plant
+##ha
+helped
+except
+studied
+computer
+figure
+relationship
+issue
+significant
+loss
+die
+smiled
+gun
+ago
+highest
+1972
+##am
+male
+bring
+goals
+mexico
+problem
+distance
+commercial
+completely
+location
+annual
+famous
+drive
+1976
+neck
+1978
+surface
+caused
+italy
+understand
+greek
+highway
+wrong
+hotel
+comes
+appearance
+joseph
+double
+issues
+musical
+companies
+castle
+income
+review
+assembly
+bass
+initially
+parliament
+artists
+experience
+1974
+particular
+walk
+foot
+engineering
+talking
+window
+dropped
+##ter
+miss
+baby
+boys
+break
+1975
+stars
+edge
+remember
+policy
+carried
+train
+stadium
+bar
+sex
+angeles
+evidence
+##ge
+becoming
+assistant
+soviet
+1977
+upper
+step
+wing
+1970
+youth
+financial
+reach
+##ll
+actor
+numerous
+##se
+##st
+nodded
+arrived
+##ation
+minute
+##nt
+believed
+sorry
+complex
+beautiful
+victory
+associated
+temple
+1968
+1973
+chance
+perhaps
+metal
+##son
+1945
+bishop
+##et
+lee
+launched
+particularly
+tree
+le
+retired
+subject
+prize
+contains
+yeah
+theory
+empire
+##ce
+suddenly
+waiting
+trust
+recording
+##to
+happy
+terms
+camp
+champion
+1971
+religious
+pass
+zealand
+names
+2nd
+port
+ancient
+tom
+corner
+represented
+watch
+legal
+anti
+justice
+cause
+watched
+brothers
+45
+material
+changes
+simply
+response
+louis
+fast
+##ting
+answer
+60
+historical
+1969
+stories
+straight
+create
+feature
+increased
+rate
+administration
+virginia
+el
+activities
+cultural
+overall
+winner
+programs
+basketball
+legs
+guard
+beyond
+cast
+doctor
+mm
+flight
+results
+remains
+cost
+effect
+winter
+##ble
+larger
+islands
+problems
+chairman
+grew
+commander
+isn
+1967
+pay
+failed
+selected
+hurt
+fort
+box
+regiment
+majority
+journal
+35
+edward
+plans
+##ke
+##ni
+shown
+pretty
+irish
+characters
+directly
+scene
+likely
+operated
+allow
+spring
+##j
+junior
+matches
+looks
+mike
+houses
+fellow
+##tion
+beach
+marriage
+##ham
+##ive
+rules
+oil
+65
+florida
+expected
+nearby
+congress
+sam
+peace
+recent
+iii
+wait
+subsequently
+cell
+##do
+variety
+serving
+agreed
+please
+poor
+joe
+pacific
+attempt
+wood
+democratic
+piece
+prime
+##ca
+rural
+mile
+touch
+appears
+township
+1964
+1966
+soldiers
+##men
+##ized
+1965
+pennsylvania
+closer
+fighting
+claimed
+score
+jones
+physical
+editor
+##ous
+filled
+genus
+specific
+sitting
+super
+mom
+##va
+therefore
+supported
+status
+fear
+cases
+store
+meaning
+wales
+minor
+spain
+tower
+focus
+vice
+frank
+follow
+parish
+separate
+golden
+horse
+fifth
+remaining
+branch
+32
+presented
+stared
+##id
+uses
+secret
+forms
+##co
+baseball
+exactly
+##ck
+choice
+note
+discovered
+travel
+composed
+truth
+russia
+ball
+color
+kiss
+dad
+wind
+continue
+ring
+referred
+numbers
+digital
+greater
+##ns
+metres
+slightly
+direct
+increase
+1960
+responsible
+crew
+rule
+trees
+troops
+##no
+broke
+goes
+individuals
+hundred
+weight
+creek
+sleep
+memory
+defense
+provides
+ordered
+code
+value
+jewish
+windows
+1944
+safe
+judge
+whatever
+corps
+realized
+growing
+pre
+##ga
+cities
+alexander
+gaze
+lies
+spread
+scott
+letter
+showed
+situation
+mayor
+transport
+watching
+workers
+extended
+##li
+expression
+normal
+##ment
+chart
+multiple
+border
+##ba
+host
+##ner
+daily
+mrs
+walls
+piano
+##ko
+heat
+cannot
+##ate
+earned
+products
+drama
+era
+authority
+seasons
+join
+grade
+##io
+sign
+difficult
+machine
+1963
+territory
+mainly
+##wood
+stations
+squadron
+1962
+stepped
+iron
+19th
+##led
+serve
+appear
+sky
+speak
+broken
+charge
+knowledge
+kilometres
+removed
+ships
+article
+campus
+simple
+##ty
+pushed
+britain
+##ve
+leaves
+recently
+cd
+soft
+boston
+latter
+easy
+acquired
+poland
+##sa
+quality
+officers
+presence
+planned
+nations
+mass
+broadcast
+jean
+share
+image
+influence
+wild
+offer
+emperor
+electric
+reading
+headed
+ability
+promoted
+yellow
+ministry
+1942
+throat
+smaller
+politician
+##by
+latin
+spoke
+cars
+williams
+males
+lack
+pop
+80
+##ier
+acting
+seeing
+consists
+##ti
+estate
+1961
+pressure
+johnson
+newspaper
+jr
+chris
+olympics
+online
+conditions
+beat
+elements
+walking
+vote
+##field
+needs
+carolina
+text
+featuring
+global
+block
+shirt
+levels
+francisco
+purpose
+females
+et
+dutch
+duke
+ahead
+gas
+twice
+safety
+serious
+turning
+highly
+lieutenant
+firm
+maria
+amount
+mixed
+daniel
+proposed
+perfect
+agreement
+affairs
+3rd
+seconds
+contemporary
+paid
+1943
+prison
+save
+kitchen
+label
+administrative
+intended
+constructed
+academic
+nice
+teacher
+races
+1956
+formerly
+corporation
+ben
+nation
+issued
+shut
+1958
+drums
+housing
+victoria
+seems
+opera
+1959
+graduated
+function
+von
+mentioned
+picked
+build
+recognized
+shortly
+protection
+picture
+notable
+exchange
+elections
+1980s
+loved
+percent
+racing
+fish
+elizabeth
+garden
+volume
+hockey
+1941
+beside
+settled
+##ford
+1940
+competed
+replied
+drew
+1948
+actress
+marine
+scotland
+steel
+glanced
+farm
+steve
+1957
+risk
+tonight
+positive
+magic
+singles
+effects
+gray
+screen
+dog
+##ja
+residents
+bus
+sides
+none
+secondary
+literature
+polish
+destroyed
+flying
+founder
+households
+1939
+lay
+reserve
+usa
+gallery
+##ler
+1946
+industrial
+younger
+approach
+appearances
+urban
+ones
+1950
+finish
+avenue
+powerful
+fully
+growth
+page
+honor
+jersey
+projects
+advanced
+revealed
+basic
+90
+infantry
+pair
+equipment
+visit
+33
+evening
+search
+grant
+effort
+solo
+treatment
+buried
+republican
+primarily
+bottom
+owner
+1970s
+israel
+gives
+jim
+dream
+bob
+remain
+spot
+70
+notes
+produce
+champions
+contact
+ed
+soul
+accepted
+ways
+del
+##ally
+losing
+split
+price
+capacity
+basis
+trial
+questions
+##ina
+1955
+20th
+guess
+officially
+memorial
+naval
+initial
+##ization
+whispered
+median
+engineer
+##ful
+sydney
+##go
+columbia
+strength
+300
+1952
+tears
+senate
+00
+card
+asian
+agent
+1947
+software
+44
+draw
+warm
+supposed
+com
+pro
+##il
+transferred
+leaned
+##at
+candidate
+escape
+mountains
+asia
+potential
+activity
+entertainment
+seem
+traffic
+jackson
+murder
+36
+slow
+product
+orchestra
+haven
+agency
+bbc
+taught
+website
+comedy
+unable
+storm
+planning
+albums
+rugby
+environment
+scientific
+grabbed
+protect
+##hi
+boat
+typically
+1954
+1953
+damage
+principal
+divided
+dedicated
+mount
+ohio
+##berg
+pick
+fought
+driver
+##der
+empty
+shoulders
+sort
+thank
+berlin
+prominent
+account
+freedom
+necessary
+efforts
+alex
+headquarters
+follows
+alongside
+des
+simon
+andrew
+suggested
+operating
+learning
+steps
+1949
+sweet
+technical
+begin
+easily
+34
+teeth
+speaking
+settlement
+scale
+##sh
+renamed
+ray
+max
+enemy
+semi
+joint
+compared
+##rd
+scottish
+leadership
+analysis
+offers
+georgia
+pieces
+captured
+animal
+deputy
+guest
+organized
+##lin
+tony
+combined
+method
+challenge
+1960s
+huge
+wants
+battalion
+sons
+rise
+crime
+types
+facilities
+telling
+path
+1951
+platform
+sit
+1990s
+##lo
+tells
+assigned
+rich
+pull
+##ot
+commonly
+alive
+##za
+letters
+concept
+conducted
+wearing
+happen
+bought
+becomes
+holy
+gets
+ocean
+defeat
+languages
+purchased
+coffee
+occurred
+titled
+##q
+declared
+applied
+sciences
+concert
+sounds
+jazz
+brain
+##me
+painting
+fleet
+tax
+nick
+##ius
+michigan
+count
+animals
+leaders
+episodes
+##line
+content
+##den
+birth
+##it
+clubs
+64
+palace
+critical
+refused
+fair
+leg
+laughed
+returning
+surrounding
+participated
+formation
+lifted
+pointed
+connected
+rome
+medicine
+laid
+taylor
+santa
+powers
+adam
+tall
+shared
+focused
+knowing
+yards
+entrance
+falls
+##wa
+calling
+##ad
+sources
+chosen
+beneath
+resources
+yard
+##ite
+nominated
+silence
+zone
+defined
+##que
+gained
+thirty
+38
+bodies
+moon
+##ard
+adopted
+christmas
+widely
+register
+apart
+iran
+premier
+serves
+du
+unknown
+parties
+##les
+generation
+##ff
+continues
+quick
+fields
+brigade
+quiet
+teaching
+clothes
+impact
+weapons
+partner
+flat
+theater
+supreme
+1938
+37
+relations
+##tor
+plants
+suffered
+1936
+wilson
+kids
+begins
+##age
+1918
+seats
+armed
+internet
+models
+worth
+laws
+400
+communities
+classes
+background
+knows
+thanks
+quarter
+reaching
+humans
+carry
+killing
+format
+kong
+hong
+setting
+75
+architecture
+disease
+railroad
+inc
+possibly
+wish
+arthur
+thoughts
+harry
+doors
+density
+##di
+crowd
+illinois
+stomach
+tone
+unique
+reports
+anyway
+##ir
+liberal
+der
+vehicle
+thick
+dry
+drug
+faced
+largely
+facility
+theme
+holds
+creation
+strange
+colonel
+##mi
+revolution
+bell
+politics
+turns
+silent
+rail
+relief
+independence
+combat
+shape
+write
+determined
+sales
+learned
+4th
+finger
+oxford
+providing
+1937
+heritage
+fiction
+situated
+designated
+allowing
+distribution
+hosted
+##est
+sight
+interview
+estimated
+reduced
+##ria
+toronto
+footballer
+keeping
+guys
+damn
+claim
+motion
+sport
+sixth
+stayed
+##ze
+en
+rear
+receive
+handed
+twelve
+dress
+audience
+granted
+brazil
+##well
+spirit
+##ated
+noticed
+etc
+olympic
+representative
+eric
+tight
+trouble
+reviews
+drink
+vampire
+missing
+roles
+ranked
+newly
+household
+finals
+wave
+critics
+##ee
+phase
+massachusetts
+pilot
+unlike
+philadelphia
+bright
+guns
+crown
+organizations
+roof
+42
+respectively
+clearly
+tongue
+marked
+circle
+fox
+korea
+bronze
+brian
+expanded
+sexual
+supply
+yourself
+inspired
+labour
+fc
+##ah
+reference
+vision
+draft
+connection
+brand
+reasons
+1935
+classic
+driving
+trip
+jesus
+cells
+entry
+1920
+neither
+trail
+claims
+atlantic
+orders
+labor
+nose
+afraid
+identified
+intelligence
+calls
+cancer
+attacked
+passing
+stephen
+positions
+imperial
+grey
+jason
+39
+sunday
+48
+swedish
+avoid
+extra
+uncle
+message
+covers
+allows
+surprise
+materials
+fame
+hunter
+##ji
+1930
+citizens
+figures
+davis
+environmental
+confirmed
+shit
+titles
+di
+performing
+difference
+acts
+attacks
+##ov
+existing
+votes
+opportunity
+nor
+shop
+entirely
+trains
+opposite
+pakistan
+##pa
+develop
+resulted
+representatives
+actions
+reality
+pressed
+##ish
+barely
+wine
+conversation
+faculty
+northwest
+ends
+documentary
+nuclear
+stock
+grace
+sets
+eat
+alternative
+##ps
+bag
+resulting
+creating
+surprised
+cemetery
+1919
+drop
+finding
+sarah
+cricket
+streets
+tradition
+ride
+1933
+exhibition
+target
+ear
+explained
+rain
+composer
+injury
+apartment
+municipal
+educational
+occupied
+netherlands
+clean
+billion
+constitution
+learn
+1914
+maximum
+classical
+francis
+lose
+opposition
+jose
+ontario
+bear
+core
+hills
+rolled
+ending
+drawn
+permanent
+fun
+##tes
+##lla
+lewis
+sites
+chamber
+ryan
+##way
+scoring
+height
+1934
+##house
+lyrics
+staring
+55
+officials
+1917
+snow
+oldest
+##tic
+orange
+##ger
+qualified
+interior
+apparently
+succeeded
+thousand
+dinner
+lights
+existence
+fans
+heavily
+41
+greatest
+conservative
+send
+bowl
+plus
+enter
+catch
+##un
+economy
+duty
+1929
+speech
+authorities
+princess
+performances
+versions
+shall
+graduate
+pictures
+effective
+remembered
+poetry
+desk
+crossed
+starring
+starts
+passenger
+sharp
+##ant
+acres
+ass
+weather
+falling
+rank
+fund
+supporting
+check
+adult
+publishing
+heads
+cm
+southeast
+lane
+##burg
+application
+bc
+##ura
+les
+condition
+transfer
+prevent
+display
+ex
+regions
+earl
+federation
+cool
+relatively
+answered
+besides
+1928
+obtained
+portion
+##town
+mix
+##ding
+reaction
+liked
+dean
+express
+peak
+1932
+##tte
+counter
+religion
+chain
+rare
+miller
+convention
+aid
+lie
+vehicles
+mobile
+perform
+squad
+wonder
+lying
+crazy
+sword
+##ping
+attempted
+centuries
+weren
+philosophy
+category
+##ize
+anna
+interested
+47
+sweden
+wolf
+frequently
+abandoned
+kg
+literary
+alliance
+task
+entitled
+##ay
+threw
+promotion
+factory
+tiny
+soccer
+visited
+matt
+fm
+achieved
+52
+defence
+internal
+persian
+43
+methods
+##ging
+arrested
+otherwise
+cambridge
+programming
+villages
+elementary
+districts
+rooms
+criminal
+conflict
+worry
+trained
+1931
+attempts
+waited
+signal
+bird
+truck
+subsequent
+programme
+##ol
+ad
+49
+communist
+details
+faith
+sector
+patrick
+carrying
+laugh
+##ss
+controlled
+korean
+showing
+origin
+fuel
+evil
+1927
+##ent
+brief
+identity
+darkness
+address
+pool
+missed
+publication
+web
+planet
+ian
+anne
+wings
+invited
+##tt
+briefly
+standards
+kissed
+##be
+ideas
+climate
+causing
+walter
+worse
+albert
+articles
+winners
+desire
+aged
+northeast
+dangerous
+gate
+doubt
+1922
+wooden
+multi
+##ky
+poet
+rising
+funding
+46
+communications
+communication
+violence
+copies
+prepared
+ford
+investigation
+skills
+1924
+pulling
+electronic
+##ak
+##ial
+##han
+containing
+ultimately
+offices
+singing
+understanding
+restaurant
+tomorrow
+fashion
+christ
+ward
+da
+pope
+stands
+5th
+flow
+studios
+aired
+commissioned
+contained
+exist
+fresh
+americans
+##per
+wrestling
+approved
+kid
+employed
+respect
+suit
+1925
+angel
+asking
+increasing
+frame
+angry
+selling
+1950s
+thin
+finds
+##nd
+temperature
+statement
+ali
+explain
+inhabitants
+towns
+extensive
+narrow
+51
+jane
+flowers
+images
+promise
+somewhere
+object
+fly
+closely
+##ls
+1912
+bureau
+cape
+1926
+weekly
+presidential
+legislative
+1921
+##ai
+##au
+launch
+founding
+##ny
+978
+##ring
+artillery
+strike
+un
+institutions
+roll
+writers
+landing
+chose
+kevin
+anymore
+pp
+##ut
+attorney
+fit
+dan
+billboard
+receiving
+agricultural
+breaking
+sought
+dave
+admitted
+lands
+mexican
+##bury
+charlie
+specifically
+hole
+iv
+howard
+credit
+moscow
+roads
+accident
+1923
+proved
+wear
+struck
+hey
+guards
+stuff
+slid
+expansion
+1915
+cat
+anthony
+##kin
+melbourne
+opposed
+sub
+southwest
+architect
+failure
+plane
+1916
+##ron
+map
+camera
+tank
+listen
+regarding
+wet
+introduction
+metropolitan
+link
+ep
+fighter
+inch
+grown
+gene
+anger
+fixed
+buy
+dvd
+khan
+domestic
+worldwide
+chapel
+mill
+functions
+examples
+##head
+developing
+1910
+turkey
+hits
+pocket
+antonio
+papers
+grow
+unless
+circuit
+18th
+concerned
+attached
+journalist
+selection
+journey
+converted
+provincial
+painted
+hearing
+aren
+bands
+negative
+aside
+wondered
+knight
+lap
+survey
+ma
+##ow
+noise
+billy
+##ium
+shooting
+guide
+bedroom
+priest
+resistance
+motor
+homes
+sounded
+giant
+##mer
+150
+scenes
+equal
+comic
+patients
+hidden
+solid
+actual
+bringing
+afternoon
+touched
+funds
+wedding
+consisted
+marie
+canal
+sr
+kim
+treaty
+turkish
+recognition
+residence
+cathedral
+broad
+knees
+incident
+shaped
+fired
+norwegian
+handle
+cheek
+contest
+represent
+##pe
+representing
+beauty
+##sen
+birds
+advantage
+emergency
+wrapped
+drawing
+notice
+pink
+broadcasting
+##ong
+somehow
+bachelor
+seventh
+collected
+registered
+establishment
+alan
+assumed
+chemical
+personnel
+roger
+retirement
+jeff
+portuguese
+wore
+tied
+device
+threat
+progress
+advance
+##ised
+banks
+hired
+manchester
+nfl
+teachers
+structures
+forever
+##bo
+tennis
+helping
+saturday
+sale
+applications
+junction
+hip
+incorporated
+neighborhood
+dressed
+ceremony
+##ds
+influenced
+hers
+visual
+stairs
+decades
+inner
+kansas
+hung
+hoped
+gain
+scheduled
+downtown
+engaged
+austria
+clock
+norway
+certainly
+pale
+protected
+1913
+victor
+employees
+plate
+putting
+surrounded
+##ists
+finishing
+blues
+tropical
+##ries
+minnesota
+consider
+philippines
+accept
+54
+retrieved
+1900
+concern
+anderson
+properties
+institution
+gordon
+successfully
+vietnam
+##dy
+backing
+outstanding
+muslim
+crossing
+folk
+producing
+usual
+demand
+occurs
+observed
+lawyer
+educated
+##ana
+kelly
+string
+pleasure
+budget
+items
+quietly
+colorado
+philip
+typical
+##worth
+derived
+600
+survived
+asks
+mental
+##ide
+56
+jake
+jews
+distinguished
+ltd
+1911
+sri
+extremely
+53
+athletic
+loud
+thousands
+worried
+shadow
+transportation
+horses
+weapon
+arena
+importance
+users
+tim
+objects
+contributed
+dragon
+douglas
+aware
+senator
+johnny
+jordan
+sisters
+engines
+flag
+investment
+samuel
+shock
+capable
+clark
+row
+wheel
+refers
+session
+familiar
+biggest
+wins
+hate
+maintained
+drove
+hamilton
+request
+expressed
+injured
+underground
+churches
+walker
+wars
+tunnel
+passes
+stupid
+agriculture
+softly
+cabinet
+regarded
+joining
+indiana
+##ea
+##ms
+push
+dates
+spend
+behavior
+woods
+protein
+gently
+chase
+morgan
+mention
+burning
+wake
+combination
+occur
+mirror
+leads
+jimmy
+indeed
+impossible
+singapore
+paintings
+covering
+##nes
+soldier
+locations
+attendance
+sell
+historian
+wisconsin
+invasion
+argued
+painter
+diego
+changing
+egypt
+##don
+experienced
+inches
+##ku
+missouri
+vol
+grounds
+spoken
+switzerland
+##gan
+reform
+rolling
+ha
+forget
+massive
+resigned
+burned
+allen
+tennessee
+locked
+values
+improved
+##mo
+wounded
+universe
+sick
+dating
+facing
+pack
+purchase
+user
+##pur
+moments
+##ul
+merged
+anniversary
+1908
+coal
+brick
+understood
+causes
+dynasty
+queensland
+establish
+stores
+crisis
+promote
+hoping
+views
+cards
+referee
+extension
+##si
+raise
+arizona
+improve
+colonial
+formal
+charged
+##rt
+palm
+lucky
+hide
+rescue
+faces
+95
+feelings
+candidates
+juan
+##ell
+goods
+6th
+courses
+weekend
+59
+luke
+cash
+fallen
+##om
+delivered
+affected
+installed
+carefully
+tries
+swiss
+hollywood
+costs
+lincoln
+responsibility
+##he
+shore
+file
+proper
+normally
+maryland
+assistance
+jump
+constant
+offering
+friendly
+waters
+persons
+realize
+contain
+trophy
+800
+partnership
+factor
+58
+musicians
+cry
+bound
+oregon
+indicated
+hero
+houston
+medium
+##ure
+consisting
+somewhat
+##ara
+57
+cycle
+##che
+beer
+moore
+frederick
+gotten
+eleven
+worst
+weak
+approached
+arranged
+chin
+loan
+universal
+bond
+fifteen
+pattern
+disappeared
+##ney
+translated
+##zed
+lip
+arab
+capture
+interests
+insurance
+##chi
+shifted
+cave
+prix
+warning
+sections
+courts
+coat
+plot
+smell
+feed
+golf
+favorite
+maintain
+knife
+vs
+voted
+degrees
+finance
+quebec
+opinion
+translation
+manner
+ruled
+operate
+productions
+choose
+musician
+discovery
+confused
+tired
+separated
+stream
+techniques
+committed
+attend
+ranking
+kings
+throw
+passengers
+measure
+horror
+fan
+mining
+sand
+danger
+salt
+calm
+decade
+dam
+require
+runner
+##ik
+rush
+associate
+greece
+##ker
+rivers
+consecutive
+matthew
+##ski
+sighed
+sq
+documents
+steam
+edited
+closing
+tie
+accused
+1905
+##ini
+islamic
+distributed
+directors
+organisation
+bruce
+7th
+breathing
+mad
+lit
+arrival
+concrete
+taste
+08
+composition
+shaking
+faster
+amateur
+adjacent
+stating
+1906
+twin
+flew
+##ran
+tokyo
+publications
+##tone
+obviously
+ridge
+storage
+1907
+carl
+pages
+concluded
+desert
+driven
+universities
+ages
+terminal
+sequence
+borough
+250
+constituency
+creative
+cousin
+economics
+dreams
+margaret
+notably
+reduce
+montreal
+mode
+17th
+ears
+saved
+jan
+vocal
+##ica
+1909
+andy
+##jo
+riding
+roughly
+threatened
+##ise
+meters
+meanwhile
+landed
+compete
+repeated
+grass
+czech
+regularly
+charges
+tea
+sudden
+appeal
+##ung
+solution
+describes
+pierre
+classification
+glad
+parking
+##ning
+belt
+physics
+99
+rachel
+add
+hungarian
+participate
+expedition
+damaged
+gift
+childhood
+85
+fifty
+##red
+mathematics
+jumped
+letting
+defensive
+mph
+##ux
+##gh
+testing
+##hip
+hundreds
+shoot
+owners
+matters
+smoke
+israeli
+kentucky
+dancing
+mounted
+grandfather
+emma
+designs
+profit
+argentina
+##gs
+truly
+li
+lawrence
+cole
+begun
+detroit
+willing
+branches
+smiling
+decide
+miami
+enjoyed
+recordings
+##dale
+poverty
+ethnic
+gay
+##bi
+gary
+arabic
+09
+accompanied
+##one
+##ons
+fishing
+determine
+residential
+acid
+##ary
+alice
+returns
+starred
+mail
+##ang
+jonathan
+strategy
+##ue
+net
+forty
+cook
+businesses
+equivalent
+commonwealth
+distinct
+ill
+##cy
+seriously
+##ors
+##ped
+shift
+harris
+replace
+rio
+imagine
+formula
+ensure
+##ber
+additionally
+scheme
+conservation
+occasionally
+purposes
+feels
+favor
+##and
+##ore
+1930s
+contrast
+hanging
+hunt
+movies
+1904
+instruments
+victims
+danish
+christopher
+busy
+demon
+sugar
+earliest
+colony
+studying
+balance
+duties
+##ks
+belgium
+slipped
+carter
+05
+visible
+stages
+iraq
+fifa
+##im
+commune
+forming
+zero
+07
+continuing
+talked
+counties
+legend
+bathroom
+option
+tail
+clay
+daughters
+afterwards
+severe
+jaw
+visitors
+##ded
+devices
+aviation
+russell
+kate
+##vi
+entering
+subjects
+##ino
+temporary
+swimming
+forth
+smooth
+ghost
+audio
+bush
+operates
+rocks
+movements
+signs
+eddie
+##tz
+ann
+voices
+honorary
+06
+memories
+dallas
+pure
+measures
+racial
+promised
+66
+harvard
+ceo
+16th
+parliamentary
+indicate
+benefit
+flesh
+dublin
+louisiana
+1902
+1901
+patient
+sleeping
+1903
+membership
+coastal
+medieval
+wanting
+element
+scholars
+rice
+62
+limit
+survive
+makeup
+rating
+definitely
+collaboration
+obvious
+##tan
+boss
+ms
+baron
+birthday
+linked
+soil
+diocese
+##lan
+ncaa
+##mann
+offensive
+shell
+shouldn
+waist
+##tus
+plain
+ross
+organ
+resolution
+manufacturing
+adding
+relative
+kennedy
+98
+whilst
+moth
+marketing
+gardens
+crash
+72
+heading
+partners
+credited
+carlos
+moves
+cable
+##zi
+marshall
+##out
+depending
+bottle
+represents
+rejected
+responded
+existed
+04
+jobs
+denmark
+lock
+##ating
+treated
+graham
+routes
+talent
+commissioner
+drugs
+secure
+tests
+reign
+restored
+photography
+##gi
+contributions
+oklahoma
+designer
+disc
+grin
+seattle
+robin
+paused
+atlanta
+unusual
+##gate
+praised
+las
+laughing
+satellite
+hungary
+visiting
+##sky
+interesting
+factors
+deck
+poems
+norman
+##water
+stuck
+speaker
+rifle
+domain
+premiered
+##her
+dc
+comics
+actors
+01
+reputation
+eliminated
+8th
+ceiling
+prisoners
+script
+##nce
+leather
+austin
+mississippi
+rapidly
+admiral
+parallel
+charlotte
+guilty
+tools
+gender
+divisions
+fruit
+##bs
+laboratory
+nelson
+fantasy
+marry
+rapid
+aunt
+tribe
+requirements
+aspects
+suicide
+amongst
+adams
+bone
+ukraine
+abc
+kick
+sees
+edinburgh
+clothing
+column
+rough
+gods
+hunting
+broadway
+gathered
+concerns
+##ek
+spending
+ty
+12th
+snapped
+requires
+solar
+bones
+cavalry
+##tta
+iowa
+drinking
+waste
+index
+franklin
+charity
+thompson
+stewart
+tip
+flash
+landscape
+friday
+enjoy
+singh
+poem
+listening
+##back
+eighth
+fred
+differences
+adapted
+bomb
+ukrainian
+surgery
+corporate
+masters
+anywhere
+##more
+waves
+odd
+sean
+portugal
+orleans
+dick
+debate
+kent
+eating
+puerto
+cleared
+96
+expect
+cinema
+97
+guitarist
+blocks
+electrical
+agree
+involving
+depth
+dying
+panel
+struggle
+##ged
+peninsula
+adults
+novels
+emerged
+vienna
+metro
+debuted
+shoes
+tamil
+songwriter
+meets
+prove
+beating
+instance
+heaven
+scared
+sending
+marks
+artistic
+passage
+superior
+03
+significantly
+shopping
+##tive
+retained
+##izing
+malaysia
+technique
+cheeks
+##ola
+warren
+maintenance
+destroy
+extreme
+allied
+120
+appearing
+##yn
+fill
+advice
+alabama
+qualifying
+policies
+cleveland
+hat
+battery
+smart
+authors
+10th
+soundtrack
+acted
+dated
+lb
+glance
+equipped
+coalition
+funny
+outer
+ambassador
+roy
+possibility
+couples
+campbell
+dna
+loose
+ethan
+supplies
+1898
+gonna
+88
+monster
+##res
+shake
+agents
+frequency
+springs
+dogs
+practices
+61
+gang
+plastic
+easier
+suggests
+gulf
+blade
+exposed
+colors
+industries
+markets
+pan
+nervous
+electoral
+charts
+legislation
+ownership
+##idae
+mac
+appointment
+shield
+copy
+assault
+socialist
+abbey
+monument
+license
+throne
+employment
+jay
+93
+replacement
+charter
+cloud
+powered
+suffering
+accounts
+oak
+connecticut
+strongly
+wright
+colour
+crystal
+13th
+context
+welsh
+networks
+voiced
+gabriel
+jerry
+##cing
+forehead
+mp
+##ens
+manage
+schedule
+totally
+remix
+##ii
+forests
+occupation
+print
+nicholas
+brazilian
+strategic
+vampires
+engineers
+76
+roots
+seek
+correct
+instrumental
+und
+alfred
+backed
+hop
+##des
+stanley
+robinson
+traveled
+wayne
+welcome
+austrian
+achieve
+67
+exit
+rates
+1899
+strip
+whereas
+##cs
+sing
+deeply
+adventure
+bobby
+rick
+jamie
+careful
+components
+cap
+useful
+personality
+knee
+##shi
+pushing
+hosts
+02
+protest
+ca
+ottoman
+symphony
+##sis
+63
+boundary
+1890
+processes
+considering
+considerable
+tons
+##work
+##ft
+##nia
+cooper
+trading
+dear
+conduct
+91
+illegal
+apple
+revolutionary
+holiday
+definition
+harder
+##van
+jacob
+circumstances
+destruction
+##lle
+popularity
+grip
+classified
+liverpool
+donald
+baltimore
+flows
+seeking
+honour
+approval
+92
+mechanical
+till
+happening
+statue
+critic
+increasingly
+immediate
+describe
+commerce
+stare
+##ster
+indonesia
+meat
+rounds
+boats
+baker
+orthodox
+depression
+formally
+worn
+naked
+claire
+muttered
+sentence
+11th
+emily
+document
+77
+criticism
+wished
+vessel
+spiritual
+bent
+virgin
+parker
+minimum
+murray
+lunch
+danny
+printed
+compilation
+keyboards
+false
+blow
+belonged
+68
+raising
+78
+cutting
+##board
+pittsburgh
+##up
+9th
+shadows
+81
+hated
+indigenous
+jon
+15th
+barry
+scholar
+ah
+##zer
+oliver
+##gy
+stick
+susan
+meetings
+attracted
+spell
+romantic
+##ver
+ye
+1895
+photo
+demanded
+customers
+##ac
+1896
+logan
+revival
+keys
+modified
+commanded
+jeans
+##ious
+upset
+raw
+phil
+detective
+hiding
+resident
+vincent
+##bly
+experiences
+diamond
+defeating
+coverage
+lucas
+external
+parks
+franchise
+helen
+bible
+successor
+percussion
+celebrated
+il
+lift
+profile
+clan
+romania
+##ied
+mills
+##su
+nobody
+achievement
+shrugged
+fault
+1897
+rhythm
+initiative
+breakfast
+carbon
+700
+69
+lasted
+violent
+74
+wound
+ken
+killer
+gradually
+filmed
+°c
+dollars
+processing
+94
+remove
+criticized
+guests
+sang
+chemistry
+##vin
+legislature
+disney
+##bridge
+uniform
+escaped
+integrated
+proposal
+purple
+denied
+liquid
+karl
+influential
+morris
+nights
+stones
+intense
+experimental
+twisted
+71
+84
+##ld
+pace
+nazi
+mitchell
+ny
+blind
+reporter
+newspapers
+14th
+centers
+burn
+basin
+forgotten
+surviving
+filed
+collections
+monastery
+losses
+manual
+couch
+description
+appropriate
+merely
+tag
+missions
+sebastian
+restoration
+replacing
+triple
+73
+elder
+julia
+warriors
+benjamin
+julian
+convinced
+stronger
+amazing
+declined
+versus
+merchant
+happens
+output
+finland
+bare
+barbara
+absence
+ignored
+dawn
+injuries
+##port
+producers
+##ram
+82
+luis
+##ities
+kw
+admit
+expensive
+electricity
+nba
+exception
+symbol
+##ving
+ladies
+shower
+sheriff
+characteristics
+##je
+aimed
+button
+ratio
+effectively
+summit
+angle
+jury
+bears
+foster
+vessels
+pants
+executed
+evans
+dozen
+advertising
+kicked
+patrol
+1889
+competitions
+lifetime
+principles
+athletics
+##logy
+birmingham
+sponsored
+89
+rob
+nomination
+1893
+acoustic
+##sm
+creature
+longest
+##tra
+credits
+harbor
+dust
+josh
+##so
+territories
+milk
+infrastructure
+completion
+thailand
+indians
+leon
+archbishop
+##sy
+assist
+pitch
+blake
+arrangement
+girlfriend
+serbian
+operational
+hence
+sad
+scent
+fur
+dj
+sessions
+hp
+refer
+rarely
+##ora
+exists
+1892
+##ten
+scientists
+dirty
+penalty
+burst
+portrait
+seed
+79
+pole
+limits
+rival
+1894
+stable
+alpha
+grave
+constitutional
+alcohol
+arrest
+flower
+mystery
+devil
+architectural
+relationships
+greatly
+habitat
+##istic
+larry
+progressive
+remote
+cotton
+##ics
+##ok
+preserved
+reaches
+##ming
+cited
+86
+vast
+scholarship
+decisions
+cbs
+joy
+teach
+1885
+editions
+knocked
+eve
+searching
+partly
+participation
+gap
+animated
+fate
+excellent
+##ett
+na
+87
+alternate
+saints
+youngest
+##ily
+climbed
+##ita
+##tors
+suggest
+##ct
+discussion
+staying
+choir
+lakes
+jacket
+revenue
+nevertheless
+peaked
+instrument
+wondering
+annually
+managing
+neil
+1891
+signing
+terry
+##ice
+apply
+clinical
+brooklyn
+aim
+catherine
+fuck
+farmers
+figured
+ninth
+pride
+hugh
+evolution
+ordinary
+involvement
+comfortable
+shouted
+tech
+encouraged
+taiwan
+representation
+sharing
+##lia
+##em
+panic
+exact
+cargo
+competing
+fat
+cried
+83
+1920s
+occasions
+pa
+cabin
+borders
+utah
+marcus
+##isation
+badly
+muscles
+##ance
+victorian
+transition
+warner
+bet
+permission
+##rin
+slave
+terrible
+similarly
+shares
+seth
+uefa
+possession
+medals
+benefits
+colleges
+lowered
+perfectly
+mall
+transit
+##ye
+##kar
+publisher
+##ened
+harrison
+deaths
+elevation
+##ae
+asleep
+machines
+sigh
+ash
+hardly
+argument
+occasion
+parent
+leo
+decline
+1888
+contribution
+##ua
+concentration
+1000
+opportunities
+hispanic
+guardian
+extent
+emotions
+hips
+mason
+volumes
+bloody
+controversy
+diameter
+steady
+mistake
+phoenix
+identify
+violin
+##sk
+departure
+richmond
+spin
+funeral
+enemies
+1864
+gear
+literally
+connor
+random
+sergeant
+grab
+confusion
+1865
+transmission
+informed
+op
+leaning
+sacred
+suspended
+thinks
+gates
+portland
+luck
+agencies
+yours
+hull
+expert
+muscle
+layer
+practical
+sculpture
+jerusalem
+latest
+lloyd
+statistics
+deeper
+recommended
+warrior
+arkansas
+mess
+supports
+greg
+eagle
+1880
+recovered
+rated
+concerts
+rushed
+##ano
+stops
+eggs
+files
+premiere
+keith
+##vo
+delhi
+turner
+pit
+affair
+belief
+paint
+##zing
+mate
+##ach
+##ev
+victim
+##ology
+withdrew
+bonus
+styles
+fled
+##ud
+glasgow
+technologies
+funded
+nbc
+adaptation
+##ata
+portrayed
+cooperation
+supporters
+judges
+bernard
+justin
+hallway
+ralph
+##ick
+graduating
+controversial
+distant
+continental
+spider
+bite
+##ho
+recognize
+intention
+mixing
+##ese
+egyptian
+bow
+tourism
+suppose
+claiming
+tiger
+dominated
+participants
+vi
+##ru
+nurse
+partially
+tape
+##rum
+psychology
+##rn
+essential
+touring
+duo
+voting
+civilian
+emotional
+channels
+##king
+apparent
+hebrew
+1887
+tommy
+carrier
+intersection
+beast
+hudson
+##gar
+##zo
+lab
+nova
+bench
+discuss
+costa
+##ered
+detailed
+behalf
+drivers
+unfortunately
+obtain
+##lis
+rocky
+##dae
+siege
+friendship
+honey
+##rian
+1861
+amy
+hang
+posted
+governments
+collins
+respond
+wildlife
+preferred
+operator
+##po
+laura
+pregnant
+videos
+dennis
+suspected
+boots
+instantly
+weird
+automatic
+businessman
+alleged
+placing
+throwing
+ph
+mood
+1862
+perry
+venue
+jet
+remainder
+##lli
+##ci
+passion
+biological
+boyfriend
+1863
+dirt
+buffalo
+ron
+segment
+fa
+abuse
+##era
+genre
+thrown
+stroke
+colored
+stress
+exercise
+displayed
+##gen
+struggled
+##tti
+abroad
+dramatic
+wonderful
+thereafter
+madrid
+component
+widespread
+##sed
+tale
+citizen
+todd
+monday
+1886
+vancouver
+overseas
+forcing
+crying
+descent
+##ris
+discussed
+substantial
+ranks
+regime
+1870
+provinces
+switch
+drum
+zane
+ted
+tribes
+proof
+lp
+cream
+researchers
+volunteer
+manor
+silk
+milan
+donated
+allies
+venture
+principle
+delivery
+enterprise
+##ves
+##ans
+bars
+traditionally
+witch
+reminded
+copper
+##uk
+pete
+inter
+links
+colin
+grinned
+elsewhere
+competitive
+frequent
+##oy
+scream
+##hu
+tension
+texts
+submarine
+finnish
+defending
+defend
+pat
+detail
+1884
+affiliated
+stuart
+themes
+villa
+periods
+tool
+belgian
+ruling
+crimes
+answers
+folded
+licensed
+resort
+demolished
+hans
+lucy
+1881
+lion
+traded
+photographs
+writes
+craig
+##fa
+trials
+generated
+beth
+noble
+debt
+percentage
+yorkshire
+erected
+ss
+viewed
+grades
+confidence
+ceased
+islam
+telephone
+retail
+##ible
+chile
+m²
+roberts
+sixteen
+##ich
+commented
+hampshire
+innocent
+dual
+pounds
+checked
+regulations
+afghanistan
+sung
+rico
+liberty
+assets
+bigger
+options
+angels
+relegated
+tribute
+wells
+attending
+leaf
+##yan
+butler
+romanian
+forum
+monthly
+lisa
+patterns
+gmina
+##tory
+madison
+hurricane
+rev
+##ians
+bristol
+##ula
+elite
+valuable
+disaster
+democracy
+awareness
+germans
+freyja
+##ins
+loop
+absolutely
+paying
+populations
+maine
+sole
+prayer
+spencer
+releases
+doorway
+bull
+##ani
+lover
+midnight
+conclusion
+##sson
+thirteen
+lily
+mediterranean
+##lt
+nhl
+proud
+sample
+##hill
+drummer
+guinea
+##ova
+murphy
+climb
+##ston
+instant
+attributed
+horn
+ain
+railways
+steven
+##ao
+autumn
+ferry
+opponent
+root
+traveling
+secured
+corridor
+stretched
+tales
+sheet
+trinity
+cattle
+helps
+indicates
+manhattan
+murdered
+fitted
+1882
+gentle
+grandmother
+mines
+shocked
+vegas
+produces
+##light
+caribbean
+##ou
+belong
+continuous
+desperate
+drunk
+historically
+trio
+waved
+raf
+dealing
+nathan
+bat
+murmured
+interrupted
+residing
+scientist
+pioneer
+harold
+aaron
+##net
+delta
+attempting
+minority
+mini
+believes
+chorus
+tend
+lots
+eyed
+indoor
+load
+shots
+updated
+jail
+##llo
+concerning
+connecting
+wealth
+##ved
+slaves
+arrive
+rangers
+sufficient
+rebuilt
+##wick
+cardinal
+flood
+muhammad
+whenever
+relation
+runners
+moral
+repair
+viewers
+arriving
+revenge
+punk
+assisted
+bath
+fairly
+breathe
+lists
+innings
+illustrated
+whisper
+nearest
+voters
+clinton
+ties
+ultimate
+screamed
+beijing
+lions
+andre
+fictional
+gathering
+comfort
+radar
+suitable
+dismissed
+hms
+ban
+pine
+wrist
+atmosphere
+voivodeship
+bid
+timber
+##ned
+##nan
+giants
+##ane
+cameron
+recovery
+uss
+identical
+categories
+switched
+serbia
+laughter
+noah
+ensemble
+therapy
+peoples
+touching
+##off
+locally
+pearl
+platforms
+everywhere
+ballet
+tables
+lanka
+herbert
+outdoor
+toured
+derek
+1883
+spaces
+contested
+swept
+1878
+exclusive
+slight
+connections
+##dra
+winds
+prisoner
+collective
+bangladesh
+tube
+publicly
+wealthy
+thai
+##ys
+isolated
+select
+##ric
+insisted
+pen
+fortune
+ticket
+spotted
+reportedly
+animation
+enforcement
+tanks
+110
+decides
+wider
+lowest
+owen
+##time
+nod
+hitting
+##hn
+gregory
+furthermore
+magazines
+fighters
+solutions
+##ery
+pointing
+requested
+peru
+reed
+chancellor
+knights
+mask
+worker
+eldest
+flames
+reduction
+1860
+volunteers
+##tis
+reporting
+##hl
+wire
+advisory
+endemic
+origins
+settlers
+pursue
+knock
+consumer
+1876
+eu
+compound
+creatures
+mansion
+sentenced
+ivan
+deployed
+guitars
+frowned
+involves
+mechanism
+kilometers
+perspective
+shops
+maps
+terminus
+duncan
+alien
+fist
+bridges
+##pers
+heroes
+fed
+derby
+swallowed
+##ros
+patent
+sara
+illness
+characterized
+adventures
+slide
+hawaii
+jurisdiction
+##op
+organised
+##side
+adelaide
+walks
+biology
+se
+##ties
+rogers
+swing
+tightly
+boundaries
+##rie
+prepare
+implementation
+stolen
+##sha
+certified
+colombia
+edwards
+garage
+##mm
+recalled
+##ball
+rage
+harm
+nigeria
+breast
+##ren
+furniture
+pupils
+settle
+##lus
+cuba
+balls
+client
+alaska
+21st
+linear
+thrust
+celebration
+latino
+genetic
+terror
+##cia
+##ening
+lightning
+fee
+witness
+lodge
+establishing
+skull
+##ique
+earning
+hood
+##ei
+rebellion
+wang
+sporting
+warned
+missile
+devoted
+activist
+porch
+worship
+fourteen
+package
+1871
+decorated
+##shire
+housed
+##ock
+chess
+sailed
+doctors
+oscar
+joan
+treat
+garcia
+harbour
+jeremy
+##ire
+traditions
+dominant
+jacques
+##gon
+##wan
+relocated
+1879
+amendment
+sized
+companion
+simultaneously
+volleyball
+spun
+acre
+increases
+stopping
+loves
+belongs
+affect
+drafted
+tossed
+scout
+battles
+1875
+filming
+shoved
+munich
+tenure
+vertical
+romance
+pc
+##cher
+argue
+##ical
+craft
+ranging
+www
+opens
+honest
+tyler
+yesterday
+virtual
+##let
+muslims
+reveal
+snake
+immigrants
+radical
+screaming
+speakers
+firing
+saving
+belonging
+ease
+lighting
+prefecture
+blame
+farmer
+hungry
+grows
+rubbed
+beam
+sur
+subsidiary
+##cha
+armenian
+sao
+dropping
+conventional
+##fer
+microsoft
+reply
+qualify
+spots
+1867
+sweat
+festivals
+##ken
+immigration
+physician
+discover
+exposure
+sandy
+explanation
+isaac
+implemented
+##fish
+hart
+initiated
+connect
+stakes
+presents
+heights
+householder
+pleased
+tourist
+regardless
+slip
+closest
+##ction
+surely
+sultan
+brings
+riley
+preparation
+aboard
+slammed
+baptist
+experiment
+ongoing
+interstate
+organic
+playoffs
+##ika
+1877
+130
+##tar
+hindu
+error
+tours
+tier
+plenty
+arrangements
+talks
+trapped
+excited
+sank
+ho
+athens
+1872
+denver
+welfare
+suburb
+athletes
+trick
+diverse
+belly
+exclusively
+yelled
+1868
+##med
+conversion
+##ette
+1874
+internationally
+computers
+conductor
+abilities
+sensitive
+hello
+dispute
+measured
+globe
+rocket
+prices
+amsterdam
+flights
+tigers
+inn
+municipalities
+emotion
+references
+3d
+##mus
+explains
+airlines
+manufactured
+pm
+archaeological
+1873
+interpretation
+devon
+comment
+##ites
+settlements
+kissing
+absolute
+improvement
+suite
+impressed
+barcelona
+sullivan
+jefferson
+towers
+jesse
+julie
+##tin
+##lu
+grandson
+hi
+gauge
+regard
+rings
+interviews
+trace
+raymond
+thumb
+departments
+burns
+serial
+bulgarian
+scores
+demonstrated
+##ix
+1866
+kyle
+alberta
+underneath
+romanized
+##ward
+relieved
+acquisition
+phrase
+cliff
+reveals
+han
+cuts
+merger
+custom
+##dar
+nee
+gilbert
+graduation
+##nts
+assessment
+cafe
+difficulty
+demands
+swung
+democrat
+jennifer
+commons
+1940s
+grove
+##yo
+completing
+focuses
+sum
+substitute
+bearing
+stretch
+reception
+##py
+reflected
+essentially
+destination
+pairs
+##ched
+survival
+resource
+##bach
+promoting
+doubles
+messages
+tear
+##down
+##fully
+parade
+florence
+harvey
+incumbent
+partial
+framework
+900
+pedro
+frozen
+procedure
+olivia
+controls
+##mic
+shelter
+personally
+temperatures
+##od
+brisbane
+tested
+sits
+marble
+comprehensive
+oxygen
+leonard
+##kov
+inaugural
+iranian
+referring
+quarters
+attitude
+##ivity
+mainstream
+lined
+mars
+dakota
+norfolk
+unsuccessful
+##°
+explosion
+helicopter
+congressional
+##sing
+inspector
+bitch
+seal
+departed
+divine
+##ters
+coaching
+examination
+punishment
+manufacturer
+sink
+columns
+unincorporated
+signals
+nevada
+squeezed
+dylan
+dining
+photos
+martial
+manuel
+eighteen
+elevator
+brushed
+plates
+ministers
+ivy
+congregation
+##len
+slept
+specialized
+taxes
+curve
+restricted
+negotiations
+likes
+statistical
+arnold
+inspiration
+execution
+bold
+intermediate
+significance
+margin
+ruler
+wheels
+gothic
+intellectual
+dependent
+listened
+eligible
+buses
+widow
+syria
+earn
+cincinnati
+collapsed
+recipient
+secrets
+accessible
+philippine
+maritime
+goddess
+clerk
+surrender
+breaks
+playoff
+database
+##ified
+##lon
+ideal
+beetle
+aspect
+soap
+regulation
+strings
+expand
+anglo
+shorter
+crosses
+retreat
+tough
+coins
+wallace
+directions
+pressing
+##oon
+shipping
+locomotives
+comparison
+topics
+nephew
+##mes
+distinction
+honors
+travelled
+sierra
+ibn
+##over
+fortress
+sa
+recognised
+carved
+1869
+clients
+##dan
+intent
+##mar
+coaches
+describing
+bread
+##ington
+beaten
+northwestern
+##ona
+merit
+youtube
+collapse
+challenges
+em
+historians
+objective
+submitted
+virus
+attacking
+drake
+assume
+##ere
+diseases
+marc
+stem
+leeds
+##cus
+##ab
+farming
+glasses
+##lock
+visits
+nowhere
+fellowship
+relevant
+carries
+restaurants
+experiments
+101
+constantly
+bases
+targets
+shah
+tenth
+opponents
+verse
+territorial
+##ira
+writings
+corruption
+##hs
+instruction
+inherited
+reverse
+emphasis
+##vic
+employee
+arch
+keeps
+rabbi
+watson
+payment
+uh
+##ala
+nancy
+##tre
+venice
+fastest
+sexy
+banned
+adrian
+properly
+ruth
+touchdown
+dollar
+boards
+metre
+circles
+edges
+favour
+comments
+ok
+travels
+liberation
+scattered
+firmly
+##ular
+holland
+permitted
+diesel
+kenya
+den
+originated
+##ral
+demons
+resumed
+dragged
+rider
+##rus
+servant
+blinked
+extend
+torn
+##ias
+##sey
+input
+meal
+everybody
+cylinder
+kinds
+camps
+##fe
+bullet
+logic
+##wn
+croatian
+evolved
+healthy
+fool
+chocolate
+wise
+preserve
+pradesh
+##ess
+respective
+1850
+##ew
+chicken
+artificial
+gross
+corresponding
+convicted
+cage
+caroline
+dialogue
+##dor
+narrative
+stranger
+mario
+br
+christianity
+failing
+trent
+commanding
+buddhist
+1848
+maurice
+focusing
+yale
+bike
+altitude
+##ering
+mouse
+revised
+##sley
+veteran
+##ig
+pulls
+theology
+crashed
+campaigns
+legion
+##ability
+drag
+excellence
+customer
+cancelled
+intensity
+excuse
+##lar
+liga
+participating
+contributing
+printing
+##burn
+variable
+##rk
+curious
+bin
+legacy
+renaissance
+##my
+symptoms
+binding
+vocalist
+dancer
+##nie
+grammar
+gospel
+democrats
+ya
+enters
+sc
+diplomatic
+hitler
+##ser
+clouds
+mathematical
+quit
+defended
+oriented
+##heim
+fundamental
+hardware
+impressive
+equally
+convince
+confederate
+guilt
+chuck
+sliding
+##ware
+magnetic
+narrowed
+petersburg
+bulgaria
+otto
+phd
+skill
+##ama
+reader
+hopes
+pitcher
+reservoir
+hearts
+automatically
+expecting
+mysterious
+bennett
+extensively
+imagined
+seeds
+monitor
+fix
+##ative
+journalism
+struggling
+signature
+ranch
+encounter
+photographer
+observation
+protests
+##pin
+influences
+##hr
+calendar
+##all
+cruz
+croatia
+locomotive
+hughes
+naturally
+shakespeare
+basement
+hook
+uncredited
+faded
+theories
+approaches
+dare
+phillips
+filling
+fury
+obama
+##ain
+efficient
+arc
+deliver
+min
+raid
+breeding
+inducted
+leagues
+efficiency
+axis
+montana
+eagles
+##ked
+supplied
+instructions
+karen
+picking
+indicating
+trap
+anchor
+practically
+christians
+tomb
+vary
+occasional
+electronics
+lords
+readers
+newcastle
+faint
+innovation
+collect
+situations
+engagement
+160
+claude
+mixture
+##feld
+peer
+tissue
+logo
+lean
+##ration
+°f
+floors
+##ven
+architects
+reducing
+##our
+##ments
+rope
+1859
+ottawa
+##har
+samples
+banking
+declaration
+proteins
+resignation
+francois
+saudi
+advocate
+exhibited
+armor
+twins
+divorce
+##ras
+abraham
+reviewed
+jo
+temporarily
+matrix
+physically
+pulse
+curled
+##ena
+difficulties
+bengal
+usage
+##ban
+annie
+riders
+certificate
+##pi
+holes
+warsaw
+distinctive
+jessica
+##mon
+mutual
+1857
+customs
+circular
+eugene
+removal
+loaded
+mere
+vulnerable
+depicted
+generations
+dame
+heir
+enormous
+lightly
+climbing
+pitched
+lessons
+pilots
+nepal
+ram
+google
+preparing
+brad
+louise
+renowned
+##₂
+liam
+##ably
+plaza
+shaw
+sophie
+brilliant
+bills
+##bar
+##nik
+fucking
+mainland
+server
+pleasant
+seized
+veterans
+jerked
+fail
+beta
+brush
+radiation
+stored
+warmth
+southeastern
+nate
+sin
+raced
+berkeley
+joke
+athlete
+designation
+trunk
+##low
+roland
+qualification
+archives
+heels
+artwork
+receives
+judicial
+reserves
+##bed
+woke
+installation
+abu
+floating
+fake
+lesser
+excitement
+interface
+concentrated
+addressed
+characteristic
+amanda
+saxophone
+monk
+auto
+##bus
+releasing
+egg
+dies
+interaction
+defender
+ce
+outbreak
+glory
+loving
+##bert
+sequel
+consciousness
+http
+awake
+ski
+enrolled
+##ress
+handling
+rookie
+brow
+somebody
+biography
+warfare
+amounts
+contracts
+presentation
+fabric
+dissolved
+challenged
+meter
+psychological
+lt
+elevated
+rally
+accurate
+##tha
+hospitals
+undergraduate
+specialist
+venezuela
+exhibit
+shed
+nursing
+protestant
+fluid
+structural
+footage
+jared
+consistent
+prey
+##ska
+succession
+reflect
+exile
+lebanon
+wiped
+suspect
+shanghai
+resting
+integration
+preservation
+marvel
+variant
+pirates
+sheep
+rounded
+capita
+sailing
+colonies
+manuscript
+deemed
+variations
+clarke
+functional
+emerging
+boxing
+relaxed
+curse
+azerbaijan
+heavyweight
+nickname
+editorial
+rang
+grid
+tightened
+earthquake
+flashed
+miguel
+rushing
+##ches
+improvements
+boxes
+brooks
+180
+consumption
+molecular
+felix
+societies
+repeatedly
+variation
+aids
+civic
+graphics
+professionals
+realm
+autonomous
+receiver
+delayed
+workshop
+militia
+chairs
+trump
+canyon
+##point
+harsh
+extending
+lovely
+happiness
+##jan
+stake
+eyebrows
+embassy
+wellington
+hannah
+##ella
+sony
+corners
+bishops
+swear
+cloth
+contents
+xi
+namely
+commenced
+1854
+stanford
+nashville
+courage
+graphic
+commitment
+garrison
+##bin
+hamlet
+clearing
+rebels
+attraction
+literacy
+cooking
+ruins
+temples
+jenny
+humanity
+celebrate
+hasn
+freight
+sixty
+rebel
+bastard
+##art
+newton
+##ada
+deer
+##ges
+##ching
+smiles
+delaware
+singers
+##ets
+approaching
+assists
+flame
+##ph
+boulevard
+barrel
+planted
+##ome
+pursuit
+##sia
+consequences
+posts
+shallow
+invitation
+rode
+depot
+ernest
+kane
+rod
+concepts
+preston
+topic
+chambers
+striking
+blast
+arrives
+descendants
+montgomery
+ranges
+worlds
+##lay
+##ari
+span
+chaos
+praise
+##ag
+fewer
+1855
+sanctuary
+mud
+fbi
+##ions
+programmes
+maintaining
+unity
+harper
+bore
+handsome
+closure
+tournaments
+thunder
+nebraska
+linda
+facade
+puts
+satisfied
+argentine
+dale
+cork
+dome
+panama
+##yl
+1858
+tasks
+experts
+##ates
+feeding
+equation
+##las
+##ida
+##tu
+engage
+bryan
+##ax
+um
+quartet
+melody
+disbanded
+sheffield
+blocked
+gasped
+delay
+kisses
+maggie
+connects
+##non
+sts
+poured
+creator
+publishers
+##we
+guided
+ellis
+extinct
+hug
+gaining
+##ord
+complicated
+##bility
+poll
+clenched
+investigate
+##use
+thereby
+quantum
+spine
+cdp
+humor
+kills
+administered
+semifinals
+##du
+encountered
+ignore
+##bu
+commentary
+##maker
+bother
+roosevelt
+140
+plains
+halfway
+flowing
+cultures
+crack
+imprisoned
+neighboring
+airline
+##ses
+##view
+##mate
+##ec
+gather
+wolves
+marathon
+transformed
+##ill
+cruise
+organisations
+carol
+punch
+exhibitions
+numbered
+alarm
+ratings
+daddy
+silently
+##stein
+queens
+colours
+impression
+guidance
+liu
+tactical
+##rat
+marshal
+della
+arrow
+##ings
+rested
+feared
+tender
+owns
+bitter
+advisor
+escort
+##ides
+spare
+farms
+grants
+##ene
+dragons
+encourage
+colleagues
+cameras
+##und
+sucked
+pile
+spirits
+prague
+statements
+suspension
+landmark
+fence
+torture
+recreation
+bags
+permanently
+survivors
+pond
+spy
+predecessor
+bombing
+coup
+##og
+protecting
+transformation
+glow
+##lands
+##book
+dug
+priests
+andrea
+feat
+barn
+jumping
+##chen
+##ologist
+##con
+casualties
+stern
+auckland
+pipe
+serie
+revealing
+ba
+##bel
+trevor
+mercy
+spectrum
+yang
+consist
+governing
+collaborated
+possessed
+epic
+comprises
+blew
+shane
+##ack
+lopez
+honored
+magical
+sacrifice
+judgment
+perceived
+hammer
+mtv
+baronet
+tune
+das
+missionary
+sheets
+350
+neutral
+oral
+threatening
+attractive
+shade
+aims
+seminary
+##master
+estates
+1856
+michel
+wounds
+refugees
+manufacturers
+##nic
+mercury
+syndrome
+porter
+##iya
+##din
+hamburg
+identification
+upstairs
+purse
+widened
+pause
+cared
+breathed
+affiliate
+santiago
+prevented
+celtic
+fisher
+125
+recruited
+byzantine
+reconstruction
+farther
+##mp
+diet
+sake
+au
+spite
+sensation
+##ert
+blank
+separation
+105
+##hon
+vladimir
+armies
+anime
+##lie
+accommodate
+orbit
+cult
+sofia
+archive
+##ify
+##box
+founders
+sustained
+disorder
+honours
+northeastern
+mia
+crops
+violet
+threats
+blanket
+fires
+canton
+followers
+southwestern
+prototype
+voyage
+assignment
+altered
+moderate
+protocol
+pistol
+##eo
+questioned
+brass
+lifting
+1852
+math
+authored
+##ual
+doug
+dimensional
+dynamic
+##san
+1851
+pronounced
+grateful
+quest
+uncomfortable
+boom
+presidency
+stevens
+relating
+politicians
+chen
+barrier
+quinn
+diana
+mosque
+tribal
+cheese
+palmer
+portions
+sometime
+chester
+treasure
+wu
+bend
+download
+millions
+reforms
+registration
+##osa
+consequently
+monitoring
+ate
+preliminary
+brandon
+invented
+ps
+eaten
+exterior
+intervention
+ports
+documented
+log
+displays
+lecture
+sally
+favourite
+##itz
+vermont
+lo
+invisible
+isle
+breed
+##ator
+journalists
+relay
+speaks
+backward
+explore
+midfielder
+actively
+stefan
+procedures
+cannon
+blond
+kenneth
+centered
+servants
+chains
+libraries
+malcolm
+essex
+henri
+slavery
+##hal
+facts
+fairy
+coached
+cassie
+cats
+washed
+cop
+##fi
+announcement
+item
+2000s
+vinyl
+activated
+marco
+frontier
+growled
+curriculum
+##das
+loyal
+accomplished
+leslie
+ritual
+kenny
+##00
+vii
+napoleon
+hollow
+hybrid
+jungle
+stationed
+friedrich
+counted
+##ulated
+platinum
+theatrical
+seated
+col
+rubber
+glen
+1840
+diversity
+healing
+extends
+id
+provisions
+administrator
+columbus
+##oe
+tributary
+te
+assured
+org
+##uous
+prestigious
+examined
+lectures
+grammy
+ronald
+associations
+bailey
+allan
+essays
+flute
+believing
+consultant
+proceedings
+travelling
+1853
+kit
+kerala
+yugoslavia
+buddy
+methodist
+##ith
+burial
+centres
+batman
+##nda
+discontinued
+bo
+dock
+stockholm
+lungs
+severely
+##nk
+citing
+manga
+##ugh
+steal
+mumbai
+iraqi
+robot
+celebrity
+bride
+broadcasts
+abolished
+pot
+joel
+overhead
+franz
+packed
+reconnaissance
+johann
+acknowledged
+introduce
+handled
+doctorate
+developments
+drinks
+alley
+palestine
+##nis
+##aki
+proceeded
+recover
+bradley
+grain
+patch
+afford
+infection
+nationalist
+legendary
+##ath
+interchange
+virtually
+gen
+gravity
+exploration
+amber
+vital
+wishes
+powell
+doctrine
+elbow
+screenplay
+##bird
+contribute
+indonesian
+pet
+creates
+##com
+enzyme
+kylie
+discipline
+drops
+manila
+hunger
+##ien
+layers
+suffer
+fever
+bits
+monica
+keyboard
+manages
+##hood
+searched
+appeals
+##bad
+testament
+grande
+reid
+##war
+beliefs
+congo
+##ification
+##dia
+si
+requiring
+##via
+casey
+1849
+regret
+streak
+rape
+depends
+syrian
+sprint
+pound
+tourists
+upcoming
+pub
+##xi
+tense
+##els
+practiced
+echo
+nationwide
+guild
+motorcycle
+liz
+##zar
+chiefs
+desired
+elena
+bye
+precious
+absorbed
+relatives
+booth
+pianist
+##mal
+citizenship
+exhausted
+wilhelm
+##ceae
+##hed
+noting
+quarterback
+urge
+hectares
+##gue
+ace
+holly
+##tal
+blonde
+davies
+parked
+sustainable
+stepping
+twentieth
+airfield
+galaxy
+nest
+chip
+##nell
+tan
+shaft
+paulo
+requirement
+##zy
+paradise
+tobacco
+trans
+renewed
+vietnamese
+##cker
+##ju
+suggesting
+catching
+holmes
+enjoying
+md
+trips
+colt
+holder
+butterfly
+nerve
+reformed
+cherry
+bowling
+trailer
+carriage
+goodbye
+appreciate
+toy
+joshua
+interactive
+enabled
+involve
+##kan
+collar
+determination
+bunch
+facebook
+recall
+shorts
+superintendent
+episcopal
+frustration
+giovanni
+nineteenth
+laser
+privately
+array
+circulation
+##ovic
+armstrong
+deals
+painful
+permit
+discrimination
+##wi
+aires
+retiring
+cottage
+ni
+##sta
+horizon
+ellen
+jamaica
+ripped
+fernando
+chapters
+playstation
+patron
+lecturer
+navigation
+behaviour
+genes
+georgian
+export
+solomon
+rivals
+swift
+seventeen
+rodriguez
+princeton
+independently
+sox
+1847
+arguing
+entity
+casting
+hank
+criteria
+oakland
+geographic
+milwaukee
+reflection
+expanding
+conquest
+dubbed
+##tv
+halt
+brave
+brunswick
+doi
+arched
+curtis
+divorced
+predominantly
+somerset
+streams
+ugly
+zoo
+horrible
+curved
+buenos
+fierce
+dictionary
+vector
+theological
+unions
+handful
+stability
+chan
+punjab
+segments
+##lly
+altar
+ignoring
+gesture
+monsters
+pastor
+##stone
+thighs
+unexpected
+operators
+abruptly
+coin
+compiled
+associates
+improving
+migration
+pin
+##ose
+compact
+collegiate
+reserved
+##urs
+quarterfinals
+roster
+restore
+assembled
+hurry
+oval
+##cies
+1846
+flags
+martha
+##del
+victories
+sharply
+##rated
+argues
+deadly
+neo
+drawings
+symbols
+performer
+##iel
+griffin
+restrictions
+editing
+andrews
+java
+journals
+arabia
+compositions
+dee
+pierce
+removing
+hindi
+casino
+runway
+civilians
+minds
+nasa
+hotels
+##zation
+refuge
+rent
+retain
+potentially
+conferences
+suburban
+conducting
+##tto
+##tions
+##tle
+descended
+massacre
+##cal
+ammunition
+terrain
+fork
+souls
+counts
+chelsea
+durham
+drives
+cab
+##bank
+perth
+realizing
+palestinian
+finn
+simpson
+##dal
+betty
+##ule
+moreover
+particles
+cardinals
+tent
+evaluation
+extraordinary
+##oid
+inscription
+##works
+wednesday
+chloe
+maintains
+panels
+ashley
+trucks
+##nation
+cluster
+sunlight
+strikes
+zhang
+##wing
+dialect
+canon
+##ap
+tucked
+##ws
+collecting
+##mas
+##can
+##sville
+maker
+quoted
+evan
+franco
+aria
+buying
+cleaning
+eva
+closet
+provision
+apollo
+clinic
+rat
+##ez
+necessarily
+ac
+##gle
+##ising
+venues
+flipped
+cent
+spreading
+trustees
+checking
+authorized
+##sco
+disappointed
+##ado
+notion
+duration
+trumpet
+hesitated
+topped
+brussels
+rolls
+theoretical
+hint
+define
+aggressive
+repeat
+wash
+peaceful
+optical
+width
+allegedly
+mcdonald
+strict
+copyright
+##illa
+investors
+mar
+jam
+witnesses
+sounding
+miranda
+michelle
+privacy
+hugo
+harmony
+##pp
+valid
+lynn
+glared
+nina
+102
+headquartered
+diving
+boarding
+gibson
+##ncy
+albanian
+marsh
+routine
+dealt
+enhanced
+er
+intelligent
+substance
+targeted
+enlisted
+discovers
+spinning
+observations
+pissed
+smoking
+rebecca
+capitol
+visa
+varied
+costume
+seemingly
+indies
+compensation
+surgeon
+thursday
+arsenal
+westminster
+suburbs
+rid
+anglican
+##ridge
+knots
+foods
+alumni
+lighter
+fraser
+whoever
+portal
+scandal
+##ray
+gavin
+advised
+instructor
+flooding
+terrorist
+##ale
+teenage
+interim
+senses
+duck
+teen
+thesis
+abby
+eager
+overcome
+##ile
+newport
+glenn
+rises
+shame
+##cc
+prompted
+priority
+forgot
+bomber
+nicolas
+protective
+360
+cartoon
+katherine
+breeze
+lonely
+trusted
+henderson
+richardson
+relax
+banner
+candy
+palms
+remarkable
+##rio
+legends
+cricketer
+essay
+ordained
+edmund
+rifles
+trigger
+##uri
+##away
+sail
+alert
+1830
+audiences
+penn
+sussex
+siblings
+pursued
+indianapolis
+resist
+rosa
+consequence
+succeed
+avoided
+1845
+##ulation
+inland
+##tie
+##nna
+counsel
+profession
+chronicle
+hurried
+##una
+eyebrow
+eventual
+bleeding
+innovative
+cure
+##dom
+committees
+accounting
+con
+scope
+hardy
+heather
+tenor
+gut
+herald
+codes
+tore
+scales
+wagon
+##oo
+luxury
+tin
+prefer
+fountain
+triangle
+bonds
+darling
+convoy
+dried
+traced
+beings
+troy
+accidentally
+slam
+findings
+smelled
+joey
+lawyers
+outcome
+steep
+bosnia
+configuration
+shifting
+toll
+brook
+performers
+lobby
+philosophical
+construct
+shrine
+aggregate
+boot
+cox
+phenomenon
+savage
+insane
+solely
+reynolds
+lifestyle
+##ima
+nationally
+holdings
+consideration
+enable
+edgar
+mo
+mama
+##tein
+fights
+relegation
+chances
+atomic
+hub
+conjunction
+awkward
+reactions
+currency
+finale
+kumar
+underwent
+steering
+elaborate
+gifts
+comprising
+melissa
+veins
+reasonable
+sunshine
+chi
+solve
+trails
+inhabited
+elimination
+ethics
+huh
+ana
+molly
+consent
+apartments
+layout
+marines
+##ces
+hunters
+bulk
+##oma
+hometown
+##wall
+##mont
+cracked
+reads
+neighbouring
+withdrawn
+admission
+wingspan
+damned
+anthology
+lancashire
+brands
+batting
+forgive
+cuban
+awful
+##lyn
+104
+dimensions
+imagination
+##ade
+dante
+##ship
+tracking
+desperately
+goalkeeper
+##yne
+groaned
+workshops
+confident
+burton
+gerald
+milton
+circus
+uncertain
+slope
+copenhagen
+sophia
+fog
+philosopher
+portraits
+accent
+cycling
+varying
+gripped
+larvae
+garrett
+specified
+scotia
+mature
+luther
+kurt
+rap
+##kes
+aerial
+750
+ferdinand
+heated
+es
+transported
+##shan
+safely
+nonetheless
+##orn
+##gal
+motors
+demanding
+##sburg
+startled
+##brook
+ally
+generate
+caps
+ghana
+stained
+demo
+mentions
+beds
+ap
+afterward
+diary
+##bling
+utility
+##iro
+richards
+1837
+conspiracy
+conscious
+shining
+footsteps
+observer
+cyprus
+urged
+loyalty
+developer
+probability
+olive
+upgraded
+gym
+miracle
+insects
+graves
+1844
+ourselves
+hydrogen
+amazon
+katie
+tickets
+poets
+##pm
+planes
+##pan
+prevention
+witnessed
+dense
+jin
+randy
+tang
+warehouse
+monroe
+bang
+archived
+elderly
+investigations
+alec
+granite
+mineral
+conflicts
+controlling
+aboriginal
+carlo
+##zu
+mechanics
+stan
+stark
+rhode
+skirt
+est
+##berry
+bombs
+respected
+##horn
+imposed
+limestone
+deny
+nominee
+memphis
+grabbing
+disabled
+##als
+amusement
+aa
+frankfurt
+corn
+referendum
+varies
+slowed
+disk
+firms
+unconscious
+incredible
+clue
+sue
+##zhou
+twist
+##cio
+joins
+idaho
+chad
+developers
+computing
+destroyer
+103
+mortal
+tucker
+kingston
+choices
+yu
+carson
+1800
+os
+whitney
+geneva
+pretend
+dimension
+staged
+plateau
+maya
+##une
+freestyle
+##bc
+rovers
+hiv
+##ids
+tristan
+classroom
+prospect
+##hus
+honestly
+diploma
+lied
+thermal
+auxiliary
+feast
+unlikely
+iata
+##tel
+morocco
+pounding
+treasury
+lithuania
+considerably
+1841
+dish
+1812
+geological
+matching
+stumbled
+destroying
+marched
+brien
+advances
+cake
+nicole
+belle
+settling
+measuring
+directing
+##mie
+tuesday
+bassist
+capabilities
+stunned
+fraud
+torpedo
+##list
+##phone
+anton
+wisdom
+surveillance
+ruined
+##ulate
+lawsuit
+healthcare
+theorem
+halls
+trend
+aka
+horizontal
+dozens
+acquire
+lasting
+swim
+hawk
+gorgeous
+fees
+vicinity
+decrease
+adoption
+tactics
+##ography
+pakistani
+##ole
+draws
+##hall
+willie
+burke
+heath
+algorithm
+integral
+powder
+elliott
+brigadier
+jackie
+tate
+varieties
+darker
+##cho
+lately
+cigarette
+specimens
+adds
+##ree
+##ensis
+##inger
+exploded
+finalist
+cia
+murders
+wilderness
+arguments
+nicknamed
+acceptance
+onwards
+manufacture
+robertson
+jets
+tampa
+enterprises
+blog
+loudly
+composers
+nominations
+1838
+ai
+malta
+inquiry
+automobile
+hosting
+viii
+rays
+tilted
+grief
+museums
+strategies
+furious
+euro
+equality
+cohen
+poison
+surrey
+wireless
+governed
+ridiculous
+moses
+##esh
+##room
+vanished
+##ito
+barnes
+attract
+morrison
+istanbul
+##iness
+absent
+rotation
+petition
+janet
+##logical
+satisfaction
+custody
+deliberately
+observatory
+comedian
+surfaces
+pinyin
+novelist
+strictly
+canterbury
+oslo
+monks
+embrace
+ibm
+jealous
+photograph
+continent
+dorothy
+marina
+doc
+excess
+holden
+allegations
+explaining
+stack
+avoiding
+lance
+storyline
+majesty
+poorly
+spike
+dos
+bradford
+raven
+travis
+classics
+proven
+voltage
+pillow
+fists
+butt
+1842
+interpreted
+##car
+1839
+gage
+telegraph
+lens
+promising
+expelled
+casual
+collector
+zones
+##min
+silly
+nintendo
+##kh
+##bra
+downstairs
+chef
+suspicious
+afl
+flies
+vacant
+uganda
+pregnancy
+condemned
+lutheran
+estimates
+cheap
+decree
+saxon
+proximity
+stripped
+idiot
+deposits
+contrary
+presenter
+magnus
+glacier
+im
+offense
+edwin
+##ori
+upright
+##long
+bolt
+##ois
+toss
+geographical
+##izes
+environments
+delicate
+marking
+abstract
+xavier
+nails
+windsor
+plantation
+occurring
+equity
+saskatchewan
+fears
+drifted
+sequences
+vegetation
+revolt
+##stic
+1843
+sooner
+fusion
+opposing
+nato
+skating
+1836
+secretly
+ruin
+lease
+##oc
+edit
+##nne
+flora
+anxiety
+ruby
+##ological
+##mia
+tel
+bout
+taxi
+emmy
+frost
+rainbow
+compounds
+foundations
+rainfall
+assassination
+nightmare
+dominican
+##win
+achievements
+deserve
+orlando
+intact
+armenia
+##nte
+calgary
+valentine
+106
+marion
+proclaimed
+theodore
+bells
+courtyard
+thigh
+gonzalez
+console
+troop
+minimal
+monte
+everyday
+##ence
+##if
+supporter
+terrorism
+buck
+openly
+presbyterian
+activists
+carpet
+##iers
+rubbing
+uprising
+##yi
+cute
+conceived
+legally
+##cht
+millennium
+cello
+velocity
+ji
+rescued
+cardiff
+1835
+rex
+concentrate
+senators
+beard
+rendered
+glowing
+battalions
+scouts
+competitors
+sculptor
+catalogue
+arctic
+ion
+raja
+bicycle
+wow
+glancing
+lawn
+##woman
+gentleman
+lighthouse
+publish
+predicted
+calculated
+##val
+variants
+##gne
+strain
+##ui
+winston
+deceased
+##nus
+touchdowns
+brady
+caleb
+sinking
+echoed
+crush
+hon
+blessed
+protagonist
+hayes
+endangered
+magnitude
+editors
+##tine
+estimate
+responsibilities
+##mel
+backup
+laying
+consumed
+sealed
+zurich
+lovers
+frustrated
+##eau
+ahmed
+kicking
+mit
+treasurer
+1832
+biblical
+refuse
+terrified
+pump
+agrees
+genuine
+imprisonment
+refuses
+plymouth
+##hen
+lou
+##nen
+tara
+trembling
+antarctic
+ton
+learns
+##tas
+crap
+crucial
+faction
+atop
+##borough
+wrap
+lancaster
+odds
+hopkins
+erik
+lyon
+##eon
+bros
+##ode
+snap
+locality
+tips
+empress
+crowned
+cal
+acclaimed
+chuckled
+##ory
+clara
+sends
+mild
+towel
+##fl
+##day
+##а
+wishing
+assuming
+interviewed
+##bal
+##die
+interactions
+eden
+cups
+helena
+##lf
+indie
+beck
+##fire
+batteries
+filipino
+wizard
+parted
+##lam
+traces
+##born
+rows
+idol
+albany
+delegates
+##ees
+##sar
+discussions
+##ex
+notre
+instructed
+belgrade
+highways
+suggestion
+lauren
+possess
+orientation
+alexandria
+abdul
+beats
+salary
+reunion
+ludwig
+alright
+wagner
+intimate
+pockets
+slovenia
+hugged
+brighton
+merchants
+cruel
+stole
+trek
+slopes
+repairs
+enrollment
+politically
+underlying
+promotional
+counting
+boeing
+##bb
+isabella
+naming
+##и
+keen
+bacteria
+listing
+separately
+belfast
+ussr
+450
+lithuanian
+anybody
+ribs
+sphere
+martinez
+cock
+embarrassed
+proposals
+fragments
+nationals
+##fs
+##wski
+premises
+fin
+1500
+alpine
+matched
+freely
+bounded
+jace
+sleeve
+##af
+gaming
+pier
+populated
+evident
+##like
+frances
+flooded
+##dle
+frightened
+pour
+trainer
+framed
+visitor
+challenging
+pig
+wickets
+##fold
+infected
+email
+##pes
+arose
+##aw
+reward
+ecuador
+oblast
+vale
+ch
+shuttle
+##usa
+bach
+rankings
+forbidden
+cornwall
+accordance
+salem
+consumers
+bruno
+fantastic
+toes
+machinery
+resolved
+julius
+remembering
+propaganda
+iceland
+bombardment
+tide
+contacts
+wives
+##rah
+concerto
+macdonald
+albania
+implement
+daisy
+tapped
+sudan
+helmet
+angela
+mistress
+##lic
+crop
+sunk
+finest
+##craft
+hostile
+##ute
+##tsu
+boxer
+fr
+paths
+adjusted
+habit
+ballot
+supervision
+soprano
+##zen
+bullets
+wicked
+sunset
+regiments
+disappear
+lamp
+performs
+app
+##gia
+##oa
+rabbit
+digging
+incidents
+entries
+##cion
+dishes
+##oi
+introducing
+##ati
+##fied
+freshman
+slot
+jill
+tackles
+baroque
+backs
+##iest
+lone
+sponsor
+destiny
+altogether
+convert
+##aro
+consensus
+shapes
+demonstration
+basically
+feminist
+auction
+artifacts
+##bing
+strongest
+twitter
+halifax
+2019
+allmusic
+mighty
+smallest
+precise
+alexandra
+viola
+##los
+##ille
+manuscripts
+##illo
+dancers
+ari
+managers
+monuments
+blades
+barracks
+springfield
+maiden
+consolidated
+electron
+##end
+berry
+airing
+wheat
+nobel
+inclusion
+blair
+payments
+geography
+bee
+cc
+eleanor
+react
+##hurst
+afc
+manitoba
+##yu
+su
+lineup
+fitness
+recreational
+investments
+airborne
+disappointment
+##dis
+edmonton
+viewing
+##row
+renovation
+##cast
+infant
+bankruptcy
+roses
+aftermath
+pavilion
+##yer
+carpenter
+withdrawal
+ladder
+##hy
+discussing
+popped
+reliable
+agreements
+rochester
+##abad
+curves
+bombers
+220
+rao
+reverend
+decreased
+choosing
+107
+stiff
+consulting
+naples
+crawford
+tracy
+ka
+ribbon
+cops
+##lee
+crushed
+deciding
+unified
+teenager
+accepting
+flagship
+explorer
+poles
+sanchez
+inspection
+revived
+skilled
+induced
+exchanged
+flee
+locals
+tragedy
+swallow
+loading
+hanna
+demonstrate
+##ela
+salvador
+flown
+contestants
+civilization
+##ines
+wanna
+rhodes
+fletcher
+hector
+knocking
+considers
+##ough
+nash
+mechanisms
+sensed
+mentally
+walt
+unclear
+##eus
+renovated
+madame
+##cks
+crews
+governmental
+##hin
+undertaken
+monkey
+##ben
+##ato
+fatal
+armored
+copa
+caves
+governance
+grasp
+perception
+certification
+froze
+damp
+tugged
+wyoming
+##rg
+##ero
+newman
+##lor
+nerves
+curiosity
+graph
+115
+##ami
+withdraw
+tunnels
+dull
+meredith
+moss
+exhibits
+neighbors
+communicate
+accuracy
+explored
+raiders
+republicans
+secular
+kat
+superman
+penny
+criticised
+##tch
+freed
+update
+conviction
+wade
+ham
+likewise
+delegation
+gotta
+doll
+promises
+technological
+myth
+nationality
+resolve
+convent
+##mark
+sharon
+dig
+sip
+coordinator
+entrepreneur
+fold
+##dine
+capability
+councillor
+synonym
+blown
+swan
+cursed
+1815
+jonas
+haired
+sofa
+canvas
+keeper
+rivalry
+##hart
+rapper
+speedway
+swords
+postal
+maxwell
+estonia
+potter
+recurring
+##nn
+##ave
+errors
+##oni
+cognitive
+1834
+##²
+claws
+nadu
+roberto
+bce
+wrestler
+ellie
+##ations
+infinite
+ink
+##tia
+presumably
+finite
+staircase
+108
+noel
+patricia
+nacional
+##cation
+chill
+eternal
+tu
+preventing
+prussia
+fossil
+limbs
+##logist
+ernst
+frog
+perez
+rene
+##ace
+pizza
+prussian
+##ios
+##vy
+molecules
+regulatory
+answering
+opinions
+sworn
+lengths
+supposedly
+hypothesis
+upward
+habitats
+seating
+ancestors
+drank
+yield
+hd
+synthesis
+researcher
+modest
+##var
+mothers
+peered
+voluntary
+homeland
+##the
+acclaim
+##igan
+static
+valve
+luxembourg
+alto
+carroll
+fe
+receptor
+norton
+ambulance
+##tian
+johnston
+catholics
+depicting
+jointly
+elephant
+gloria
+mentor
+badge
+ahmad
+distinguish
+remarked
+councils
+precisely
+allison
+advancing
+detection
+crowded
+##10
+cooperative
+ankle
+mercedes
+dagger
+surrendered
+pollution
+commit
+subway
+jeffrey
+lesson
+sculptures
+provider
+##fication
+membrane
+timothy
+rectangular
+fiscal
+heating
+teammate
+basket
+particle
+anonymous
+deployment
+##ple
+missiles
+courthouse
+proportion
+shoe
+sec
+##ller
+complaints
+forbes
+blacks
+abandon
+remind
+sizes
+overwhelming
+autobiography
+natalie
+##awa
+risks
+contestant
+countryside
+babies
+scorer
+invaded
+enclosed
+proceed
+hurling
+disorders
+##cu
+reflecting
+continuously
+cruiser
+graduates
+freeway
+investigated
+ore
+deserved
+maid
+blocking
+phillip
+jorge
+shakes
+dove
+mann
+variables
+lacked
+burden
+accompanying
+que
+consistently
+organizing
+provisional
+complained
+endless
+##rm
+tubes
+juice
+georges
+krishna
+mick
+labels
+thriller
+##uch
+laps
+arcade
+sage
+snail
+##table
+shannon
+fi
+laurence
+seoul
+vacation
+presenting
+hire
+churchill
+surprisingly
+prohibited
+savannah
+technically
+##oli
+170
+##lessly
+testimony
+suited
+speeds
+toys
+romans
+mlb
+flowering
+measurement
+talented
+kay
+settings
+charleston
+expectations
+shattered
+achieving
+triumph
+ceremonies
+portsmouth
+lanes
+mandatory
+loser
+stretching
+cologne
+realizes
+seventy
+cornell
+careers
+webb
+##ulating
+americas
+budapest
+ava
+suspicion
+##ison
+yo
+conrad
+##hai
+sterling
+jessie
+rector
+##az
+1831
+transform
+organize
+loans
+christine
+volcanic
+warrant
+slender
+summers
+subfamily
+newer
+danced
+dynamics
+rhine
+proceeds
+heinrich
+gastropod
+commands
+sings
+facilitate
+easter
+ra
+positioned
+responses
+expense
+fruits
+yanked
+imported
+25th
+velvet
+vic
+primitive
+tribune
+baldwin
+neighbourhood
+donna
+rip
+hay
+pr
+##uro
+1814
+espn
+welcomed
+##aria
+qualifier
+glare
+highland
+timing
+##cted
+shells
+eased
+geometry
+louder
+exciting
+slovakia
+##sion
+##iz
+##lot
+savings
+prairie
+##ques
+marching
+rafael
+tonnes
+##lled
+curtain
+preceding
+shy
+heal
+greene
+worthy
+##pot
+detachment
+bury
+sherman
+##eck
+reinforced
+seeks
+bottles
+contracted
+duchess
+outfit
+walsh
+##sc
+mickey
+##ase
+geoffrey
+archer
+squeeze
+dawson
+eliminate
+invention
+##enberg
+neal
+##eth
+stance
+dealer
+coral
+maple
+retire
+polo
+simplified
+##ht
+1833
+hid
+watts
+backwards
+jules
+##oke
+genesis
+mt
+frames
+rebounds
+burma
+woodland
+moist
+santos
+whispers
+drained
+subspecies
+##aa
+streaming
+ulster
+burnt
+correspondence
+maternal
+gerard
+denis
+stealing
+##load
+genius
+duchy
+##oria
+inaugurated
+momentum
+suits
+placement
+sovereign
+clause
+thames
+##hara
+confederation
+reservation
+sketch
+yankees
+lets
+rotten
+charm
+hal
+verses
+ultra
+commercially
+dot
+salon
+citation
+adopt
+winnipeg
+mist
+allocated
+cairo
+##boy
+jenkins
+interference
+objectives
+##wind
+1820
+portfolio
+armoured
+sectors
+##eh
+initiatives
+##world
+integrity
+exercises
+robe
+tap
+ab
+gazed
+##tones
+distracted
+rulers
+111
+favorable
+jerome
+tended
+cart
+factories
+##eri
+diplomat
+valued
+gravel
+charitable
+##try
+calvin
+exploring
+chang
+shepherd
+terrace
+pdf
+pupil
+##ural
+reflects
+ups
+##rch
+governors
+shelf
+depths
+##nberg
+trailed
+crest
+tackle
+##nian
+##ats
+hatred
+##kai
+clare
+makers
+ethiopia
+longtime
+detected
+embedded
+lacking
+slapped
+rely
+thomson
+anticipation
+iso
+morton
+successive
+agnes
+screenwriter
+straightened
+philippe
+playwright
+haunted
+licence
+iris
+intentions
+sutton
+112
+logical
+correctly
+##weight
+branded
+licked
+tipped
+silva
+ricky
+narrator
+requests
+##ents
+greeted
+supernatural
+cow
+##wald
+lung
+refusing
+employer
+strait
+gaelic
+liner
+##piece
+zoe
+sabha
+##mba
+driveway
+harvest
+prints
+bates
+reluctantly
+threshold
+algebra
+ira
+wherever
+coupled
+240
+assumption
+picks
+##air
+designers
+raids
+gentlemen
+##ean
+roller
+blowing
+leipzig
+locks
+screw
+dressing
+strand
+##lings
+scar
+dwarf
+depicts
+##nu
+nods
+##mine
+differ
+boris
+##eur
+yuan
+flip
+##gie
+mob
+invested
+questioning
+applying
+##ture
+shout
+##sel
+gameplay
+blamed
+illustrations
+bothered
+weakness
+rehabilitation
+##of
+##zes
+envelope
+rumors
+miners
+leicester
+subtle
+kerry
+##ico
+ferguson
+##fu
+premiership
+ne
+##cat
+bengali
+prof
+catches
+remnants
+dana
+##rily
+shouting
+presidents
+baltic
+ought
+ghosts
+dances
+sailors
+shirley
+fancy
+dominic
+##bie
+madonna
+##rick
+bark
+buttons
+gymnasium
+ashes
+liver
+toby
+oath
+providence
+doyle
+evangelical
+nixon
+cement
+carnegie
+embarked
+hatch
+surroundings
+guarantee
+needing
+pirate
+essence
+##bee
+filter
+crane
+hammond
+projected
+immune
+percy
+twelfth
+##ult
+regent
+doctoral
+damon
+mikhail
+##ichi
+lu
+critically
+elect
+realised
+abortion
+acute
+screening
+mythology
+steadily
+##fc
+frown
+nottingham
+kirk
+wa
+minneapolis
+##rra
+module
+algeria
+mc
+nautical
+encounters
+surprising
+statues
+availability
+shirts
+pie
+alma
+brows
+munster
+mack
+soup
+crater
+tornado
+sanskrit
+cedar
+explosive
+bordered
+dixon
+planets
+stamp
+exam
+happily
+##bble
+carriers
+kidnapped
+##vis
+accommodation
+emigrated
+##met
+knockout
+correspondent
+violation
+profits
+peaks
+lang
+specimen
+agenda
+ancestry
+pottery
+spelling
+equations
+obtaining
+ki
+linking
+1825
+debris
+asylum
+##20
+buddhism
+teddy
+##ants
+gazette
+##nger
+##sse
+dental
+eligibility
+utc
+fathers
+averaged
+zimbabwe
+francesco
+coloured
+hissed
+translator
+lynch
+mandate
+humanities
+mackenzie
+uniforms
+lin
+##iana
+##gio
+asset
+mhz
+fitting
+samantha
+genera
+wei
+rim
+beloved
+shark
+riot
+entities
+expressions
+indo
+carmen
+slipping
+owing
+abbot
+neighbor
+sidney
+##av
+rats
+recommendations
+encouraging
+squadrons
+anticipated
+commanders
+conquered
+##oto
+donations
+diagnosed
+##mond
+divide
+##iva
+guessed
+decoration
+vernon
+auditorium
+revelation
+conversations
+##kers
+##power
+herzegovina
+dash
+alike
+protested
+lateral
+herman
+accredited
+mg
+##gent
+freeman
+mel
+fiji
+crow
+crimson
+##rine
+livestock
+##pped
+humanitarian
+bored
+oz
+whip
+##lene
+##ali
+legitimate
+alter
+grinning
+spelled
+anxious
+oriental
+wesley
+##nin
+##hole
+carnival
+controller
+detect
+##ssa
+bowed
+educator
+kosovo
+macedonia
+##sin
+occupy
+mastering
+stephanie
+janeiro
+para
+unaware
+nurses
+noon
+135
+cam
+hopefully
+ranger
+combine
+sociology
+polar
+rica
+##eer
+neill
+##sman
+holocaust
+##ip
+doubled
+lust
+1828
+109
+decent
+cooling
+unveiled
+##card
+1829
+nsw
+homer
+chapman
+meyer
+##gin
+dive
+mae
+reagan
+expertise
+##gled
+darwin
+brooke
+sided
+prosecution
+investigating
+comprised
+petroleum
+genres
+reluctant
+differently
+trilogy
+johns
+vegetables
+corpse
+highlighted
+lounge
+pension
+unsuccessfully
+elegant
+aided
+ivory
+beatles
+amelia
+cain
+dubai
+sunny
+immigrant
+babe
+click
+##nder
+underwater
+pepper
+combining
+mumbled
+atlas
+horns
+accessed
+ballad
+physicians
+homeless
+gestured
+rpm
+freak
+louisville
+corporations
+patriots
+prizes
+rational
+warn
+modes
+decorative
+overnight
+din
+troubled
+phantom
+##ort
+monarch
+sheer
+##dorf
+generals
+guidelines
+organs
+addresses
+##zon
+enhance
+curling
+parishes
+cord
+##kie
+linux
+caesar
+deutsche
+bavaria
+##bia
+coleman
+cyclone
+##eria
+bacon
+petty
+##yama
+##old
+hampton
+diagnosis
+1824
+throws
+complexity
+rita
+disputed
+##₃
+pablo
+##sch
+marketed
+trafficking
+##ulus
+examine
+plague
+formats
+##oh
+vault
+faithful
+##bourne
+webster
+##ox
+highlights
+##ient
+##ann
+phones
+vacuum
+sandwich
+modeling
+##gated
+bolivia
+clergy
+qualities
+isabel
+##nas
+##ars
+wears
+screams
+reunited
+annoyed
+bra
+##ancy
+##rate
+differential
+transmitter
+tattoo
+container
+poker
+##och
+excessive
+resides
+cowboys
+##tum
+augustus
+trash
+providers
+statute
+retreated
+balcony
+reversed
+void
+storey
+preceded
+masses
+leap
+laughs
+neighborhoods
+wards
+schemes
+falcon
+santo
+battlefield
+pad
+ronnie
+thread
+lesbian
+venus
+##dian
+beg
+sandstone
+daylight
+punched
+gwen
+analog
+stroked
+wwe
+acceptable
+measurements
+dec
+toxic
+##kel
+adequate
+surgical
+economist
+parameters
+varsity
+##sberg
+quantity
+ella
+##chy
+##rton
+countess
+generating
+precision
+diamonds
+expressway
+ga
+##ı
+1821
+uruguay
+talents
+galleries
+expenses
+scanned
+colleague
+outlets
+ryder
+lucien
+##ila
+paramount
+##bon
+syracuse
+dim
+fangs
+gown
+sweep
+##sie
+toyota
+missionaries
+websites
+##nsis
+sentences
+adviser
+val
+trademark
+spells
+##plane
+patience
+starter
+slim
+##borg
+toe
+incredibly
+shoots
+elliot
+nobility
+##wyn
+cowboy
+endorsed
+gardner
+tendency
+persuaded
+organisms
+emissions
+kazakhstan
+amused
+boring
+chips
+themed
+##hand
+llc
+constantinople
+chasing
+systematic
+guatemala
+borrowed
+erin
+carey
+##hard
+highlands
+struggles
+1810
+##ifying
+##ced
+wong
+exceptions
+develops
+enlarged
+kindergarten
+castro
+##ern
+##rina
+leigh
+zombie
+juvenile
+##most
+consul
+##nar
+sailor
+hyde
+clarence
+intensive
+pinned
+nasty
+useless
+jung
+clayton
+stuffed
+exceptional
+ix
+apostolic
+230
+transactions
+##dge
+exempt
+swinging
+cove
+religions
+##ash
+shields
+dairy
+bypass
+190
+pursuing
+bug
+joyce
+bombay
+chassis
+southampton
+chat
+interact
+redesignated
+##pen
+nascar
+pray
+salmon
+rigid
+regained
+malaysian
+grim
+publicity
+constituted
+capturing
+toilet
+delegate
+purely
+tray
+drift
+loosely
+striker
+weakened
+trinidad
+mitch
+itv
+defines
+transmitted
+ming
+scarlet
+nodding
+fitzgerald
+fu
+narrowly
+sp
+tooth
+standings
+virtue
+##₁
+##wara
+##cting
+chateau
+gloves
+lid
+##nel
+hurting
+conservatory
+##pel
+sinclair
+reopened
+sympathy
+nigerian
+strode
+advocated
+optional
+chronic
+discharge
+##rc
+suck
+compatible
+laurel
+stella
+shi
+fails
+wage
+dodge
+128
+informal
+sorts
+levi
+buddha
+villagers
+##aka
+chronicles
+heavier
+summoned
+gateway
+3000
+eleventh
+jewelry
+translations
+accordingly
+seas
+##ency
+fiber
+pyramid
+cubic
+dragging
+##ista
+caring
+##ops
+android
+contacted
+lunar
+##dt
+kai
+lisbon
+patted
+1826
+sacramento
+theft
+madagascar
+subtropical
+disputes
+ta
+holidays
+piper
+willow
+mare
+cane
+itunes
+newfoundland
+benny
+companions
+dong
+raj
+observe
+roar
+charming
+plaque
+tibetan
+fossils
+enacted
+manning
+bubble
+tina
+tanzania
+##eda
+##hir
+funk
+swamp
+deputies
+cloak
+ufc
+scenario
+par
+scratch
+metals
+anthem
+guru
+engaging
+specially
+##boat
+dialects
+nineteen
+cecil
+duet
+disability
+messenger
+unofficial
+##lies
+defunct
+eds
+moonlight
+drainage
+surname
+puzzle
+honda
+switching
+conservatives
+mammals
+knox
+broadcaster
+sidewalk
+cope
+##ried
+benson
+princes
+peterson
+##sal
+bedford
+sharks
+eli
+wreck
+alberto
+gasp
+archaeology
+lgbt
+teaches
+securities
+madness
+compromise
+waving
+coordination
+davidson
+visions
+leased
+possibilities
+eighty
+jun
+fernandez
+enthusiasm
+assassin
+sponsorship
+reviewer
+kingdoms
+estonian
+laboratories
+##fy
+##nal
+applies
+verb
+celebrations
+##zzo
+rowing
+lightweight
+sadness
+submit
+mvp
+balanced
+dude
+##vas
+explicitly
+metric
+magnificent
+mound
+brett
+mohammad
+mistakes
+irregular
+##hing
+##ass
+sanders
+betrayed
+shipped
+surge
+##enburg
+reporters
+termed
+georg
+pity
+verbal
+bulls
+abbreviated
+enabling
+appealed
+##are
+##atic
+sicily
+sting
+heel
+sweetheart
+bart
+spacecraft
+brutal
+monarchy
+##tter
+aberdeen
+cameo
+diane
+##ub
+survivor
+clyde
+##aries
+complaint
+##makers
+clarinet
+delicious
+chilean
+karnataka
+coordinates
+1818
+panties
+##rst
+pretending
+ar
+dramatically
+kiev
+bella
+tends
+distances
+113
+catalog
+launching
+instances
+telecommunications
+portable
+lindsay
+vatican
+##eim
+angles
+aliens
+marker
+stint
+screens
+bolton
+##rne
+judy
+wool
+benedict
+plasma
+europa
+spark
+imaging
+filmmaker
+swiftly
+##een
+contributor
+##nor
+opted
+stamps
+apologize
+financing
+butter
+gideon
+sophisticated
+alignment
+avery
+chemicals
+yearly
+speculation
+prominence
+professionally
+##ils
+immortal
+institutional
+inception
+wrists
+identifying
+tribunal
+derives
+gains
+##wo
+papal
+preference
+linguistic
+vince
+operative
+brewery
+##ont
+unemployment
+boyd
+##ured
+##outs
+albeit
+prophet
+1813
+bi
+##rr
+##face
+##rad
+quarterly
+asteroid
+cleaned
+radius
+temper
+##llen
+telugu
+jerk
+viscount
+menu
+##ote
+glimpse
+##aya
+yacht
+hawaiian
+baden
+##rl
+laptop
+readily
+##gu
+monetary
+offshore
+scots
+watches
+##yang
+##arian
+upgrade
+needle
+xbox
+lea
+encyclopedia
+flank
+fingertips
+##pus
+delight
+teachings
+confirm
+roth
+beaches
+midway
+winters
+##iah
+teasing
+daytime
+beverly
+gambling
+bonnie
+##backs
+regulated
+clement
+hermann
+tricks
+knot
+##shing
+##uring
+##vre
+detached
+ecological
+owed
+specialty
+byron
+inventor
+bats
+stays
+screened
+unesco
+midland
+trim
+affection
+##ander
+##rry
+jess
+thoroughly
+feedback
+##uma
+chennai
+strained
+heartbeat
+wrapping
+overtime
+pleaded
+##sworth
+mon
+leisure
+oclc
+##tate
+##ele
+feathers
+angelo
+thirds
+nuts
+surveys
+clever
+gill
+commentator
+##dos
+darren
+rides
+gibraltar
+##nc
+##mu
+dissolution
+dedication
+shin
+meals
+saddle
+elvis
+reds
+chaired
+taller
+appreciation
+functioning
+niece
+favored
+advocacy
+robbie
+criminals
+suffolk
+yugoslav
+passport
+constable
+congressman
+hastings
+vera
+##rov
+consecrated
+sparks
+ecclesiastical
+confined
+##ovich
+muller
+floyd
+nora
+1822
+paved
+1827
+cumberland
+ned
+saga
+spiral
+##flow
+appreciated
+yi
+collaborative
+treating
+similarities
+feminine
+finishes
+##ib
+jade
+import
+##nse
+##hot
+champagne
+mice
+securing
+celebrities
+helsinki
+attributes
+##gos
+cousins
+phases
+ache
+lucia
+gandhi
+submission
+vicar
+spear
+shine
+tasmania
+biting
+detention
+constitute
+tighter
+seasonal
+##gus
+terrestrial
+matthews
+##oka
+effectiveness
+parody
+philharmonic
+##onic
+1816
+strangers
+encoded
+consortium
+guaranteed
+regards
+shifts
+tortured
+collision
+supervisor
+inform
+broader
+insight
+theaters
+armour
+emeritus
+blink
+incorporates
+mapping
+##50
+##ein
+handball
+flexible
+##nta
+substantially
+generous
+thief
+##own
+carr
+loses
+1793
+prose
+ucla
+romeo
+generic
+metallic
+realization
+damages
+mk
+commissioners
+zach
+default
+##ther
+helicopters
+lengthy
+stems
+spa
+partnered
+spectators
+rogue
+indication
+penalties
+teresa
+1801
+sen
+##tric
+dalton
+##wich
+irving
+photographic
+##vey
+dell
+deaf
+peters
+excluded
+unsure
+##vable
+patterson
+crawled
+##zio
+resided
+whipped
+latvia
+slower
+ecole
+pipes
+employers
+maharashtra
+comparable
+va
+textile
+pageant
+##gel
+alphabet
+binary
+irrigation
+chartered
+choked
+antoine
+offs
+waking
+supplement
+##wen
+quantities
+demolition
+regain
+locate
+urdu
+folks
+alt
+114
+##mc
+scary
+andreas
+whites
+##ava
+classrooms
+mw
+aesthetic
+publishes
+valleys
+guides
+cubs
+johannes
+bryant
+conventions
+affecting
+##itt
+drain
+awesome
+isolation
+prosecutor
+ambitious
+apology
+captive
+downs
+atmospheric
+lorenzo
+aisle
+beef
+foul
+##onia
+kidding
+composite
+disturbed
+illusion
+natives
+##ffer
+emi
+rockets
+riverside
+wartime
+painters
+adolf
+melted
+##ail
+uncertainty
+simulation
+hawks
+progressed
+meantime
+builder
+spray
+breach
+unhappy
+regina
+russians
+##urg
+determining
+##tation
+tram
+1806
+##quin
+aging
+##12
+1823
+garion
+rented
+mister
+diaz
+terminated
+clip
+1817
+depend
+nervously
+disco
+owe
+defenders
+shiva
+notorious
+disbelief
+shiny
+worcester
+##gation
+##yr
+trailing
+undertook
+islander
+belarus
+limitations
+watershed
+fuller
+overlooking
+utilized
+raphael
+1819
+synthetic
+breakdown
+klein
+##nate
+moaned
+memoir
+lamb
+practicing
+##erly
+cellular
+arrows
+exotic
+##graphy
+witches
+117
+charted
+rey
+hut
+hierarchy
+subdivision
+freshwater
+giuseppe
+aloud
+reyes
+qatar
+marty
+sideways
+utterly
+sexually
+jude
+prayers
+mccarthy
+softball
+blend
+damien
+##gging
+##metric
+wholly
+erupted
+lebanese
+negro
+revenues
+tasted
+comparative
+teamed
+transaction
+labeled
+maori
+sovereignty
+parkway
+trauma
+gran
+malay
+121
+advancement
+descendant
+2020
+buzz
+salvation
+inventory
+symbolic
+##making
+antarctica
+mps
+##gas
+##bro
+mohammed
+myanmar
+holt
+submarines
+tones
+##lman
+locker
+patriarch
+bangkok
+emerson
+remarks
+predators
+kin
+afghan
+confession
+norwich
+rental
+emerge
+advantages
+##zel
+rca
+##hold
+shortened
+storms
+aidan
+##matic
+autonomy
+compliance
+##quet
+dudley
+atp
+##osis
+1803
+motto
+documentation
+summary
+professors
+spectacular
+christina
+archdiocese
+flashing
+innocence
+remake
+##dell
+psychic
+reef
+scare
+employ
+rs
+sticks
+meg
+gus
+leans
+##ude
+accompany
+bergen
+tomas
+##iko
+doom
+wages
+pools
+##nch
+##bes
+breasts
+scholarly
+alison
+outline
+brittany
+breakthrough
+willis
+realistic
+##cut
+##boro
+competitor
+##stan
+pike
+picnic
+icon
+designing
+commercials
+washing
+villain
+skiing
+micro
+costumes
+auburn
+halted
+executives
+##hat
+logistics
+cycles
+vowel
+applicable
+barrett
+exclaimed
+eurovision
+eternity
+ramon
+##umi
+##lls
+modifications
+sweeping
+disgust
+##uck
+torch
+aviv
+ensuring
+rude
+dusty
+sonic
+donovan
+outskirts
+cu
+pathway
+##band
+##gun
+##lines
+disciplines
+acids
+cadet
+paired
+##40
+sketches
+##sive
+marriages
+##⁺
+folding
+peers
+slovak
+implies
+admired
+##beck
+1880s
+leopold
+instinct
+attained
+weston
+megan
+horace
+##ination
+dorsal
+ingredients
+evolutionary
+##its
+complications
+deity
+lethal
+brushing
+levy
+deserted
+institutes
+posthumously
+delivering
+telescope
+coronation
+motivated
+rapids
+luc
+flicked
+pays
+volcano
+tanner
+weighed
+##nica
+crowds
+frankie
+gifted
+addressing
+granddaughter
+winding
+##rna
+constantine
+gomez
+##front
+landscapes
+rudolf
+anthropology
+slate
+werewolf
+##lio
+astronomy
+circa
+rouge
+dreaming
+sack
+knelt
+drowned
+naomi
+prolific
+tracked
+freezing
+herb
+##dium
+agony
+randall
+twisting
+wendy
+deposit
+touches
+vein
+wheeler
+##bbled
+##bor
+batted
+retaining
+tire
+presently
+compare
+specification
+daemon
+nigel
+##grave
+merry
+recommendation
+czechoslovakia
+sandra
+ng
+roma
+##sts
+lambert
+inheritance
+sheikh
+winchester
+cries
+examining
+##yle
+comeback
+cuisine
+nave
+##iv
+ko
+retrieve
+tomatoes
+barker
+polished
+defining
+irene
+lantern
+personalities
+begging
+tract
+swore
+1809
+175
+##gic
+omaha
+brotherhood
+##rley
+haiti
+##ots
+exeter
+##ete
+##zia
+steele
+dumb
+pearson
+210
+surveyed
+elisabeth
+trends
+##ef
+fritz
+##rf
+premium
+bugs
+fraction
+calmly
+viking
+##birds
+tug
+inserted
+unusually
+##ield
+confronted
+distress
+crashing
+brent
+turks
+resign
+##olo
+cambodia
+gabe
+sauce
+##kal
+evelyn
+116
+extant
+clusters
+quarry
+teenagers
+luna
+##lers
+##ister
+affiliation
+drill
+##ashi
+panthers
+scenic
+libya
+anita
+strengthen
+inscriptions
+##cated
+lace
+sued
+judith
+riots
+##uted
+mint
+##eta
+preparations
+midst
+dub
+challenger
+##vich
+mock
+cf
+displaced
+wicket
+breaths
+enables
+schmidt
+analyst
+##lum
+ag
+highlight
+automotive
+axe
+josef
+newark
+sufficiently
+resembles
+50th
+##pal
+flushed
+mum
+traits
+##ante
+commodore
+incomplete
+warming
+titular
+ceremonial
+ethical
+118
+celebrating
+eighteenth
+cao
+lima
+medalist
+mobility
+strips
+snakes
+##city
+miniature
+zagreb
+barton
+escapes
+umbrella
+automated
+doubted
+differs
+cooled
+georgetown
+dresden
+cooked
+fade
+wyatt
+rna
+jacobs
+carlton
+abundant
+stereo
+boost
+madras
+inning
+##hia
+spur
+ip
+malayalam
+begged
+osaka
+groan
+escaping
+charging
+dose
+vista
+##aj
+bud
+papa
+communists
+advocates
+edged
+tri
+##cent
+resemble
+peaking
+necklace
+fried
+montenegro
+saxony
+goose
+glances
+stuttgart
+curator
+recruit
+grocery
+sympathetic
+##tting
+##fort
+127
+lotus
+randolph
+ancestor
+##rand
+succeeding
+jupiter
+1798
+macedonian
+##heads
+hiking
+1808
+handing
+fischer
+##itive
+garbage
+node
+##pies
+prone
+singular
+papua
+inclined
+attractions
+italia
+pouring
+motioned
+grandma
+garnered
+jacksonville
+corp
+ego
+ringing
+aluminum
+##hausen
+ordering
+##foot
+drawer
+traders
+synagogue
+##play
+##kawa
+resistant
+wandering
+fragile
+fiona
+teased
+var
+hardcore
+soaked
+jubilee
+decisive
+exposition
+mercer
+poster
+valencia
+hale
+kuwait
+1811
+##ises
+##wr
+##eed
+tavern
+gamma
+122
+johan
+##uer
+airways
+amino
+gil
+##ury
+vocational
+domains
+torres
+##sp
+generator
+folklore
+outcomes
+##keeper
+canberra
+shooter
+fl
+beams
+confrontation
+##lling
+##gram
+feb
+aligned
+forestry
+pipeline
+jax
+motorway
+conception
+decay
+##tos
+coffin
+##cott
+stalin
+1805
+escorted
+minded
+##nam
+sitcom
+purchasing
+twilight
+veronica
+additions
+passive
+tensions
+straw
+123
+frequencies
+1804
+refugee
+cultivation
+##iate
+christie
+clary
+bulletin
+crept
+disposal
+##rich
+##zong
+processor
+crescent
+##rol
+bmw
+emphasized
+whale
+nazis
+aurora
+##eng
+dwelling
+hauled
+sponsors
+toledo
+mega
+ideology
+theatres
+tessa
+cerambycidae
+saves
+turtle
+cone
+suspects
+kara
+rusty
+yelling
+greeks
+mozart
+shades
+cocked
+participant
+##tro
+shire
+spit
+freeze
+necessity
+##cos
+inmates
+nielsen
+councillors
+loaned
+uncommon
+omar
+peasants
+botanical
+offspring
+daniels
+formations
+jokes
+1794
+pioneers
+sigma
+licensing
+##sus
+wheelchair
+polite
+1807
+liquor
+pratt
+trustee
+##uta
+forewings
+balloon
+##zz
+kilometre
+camping
+explicit
+casually
+shawn
+foolish
+teammates
+nm
+hassan
+carrie
+judged
+satisfy
+vanessa
+knives
+selective
+cnn
+flowed
+##lice
+eclipse
+stressed
+eliza
+mathematician
+cease
+cultivated
+##roy
+commissions
+browns
+##ania
+destroyers
+sheridan
+meadow
+##rius
+minerals
+##cial
+downstream
+clash
+gram
+memoirs
+ventures
+baha
+seymour
+archie
+midlands
+edith
+fare
+flynn
+invite
+canceled
+tiles
+stabbed
+boulder
+incorporate
+amended
+camden
+facial
+mollusk
+unreleased
+descriptions
+yoga
+grabs
+550
+raises
+ramp
+shiver
+##rose
+coined
+pioneering
+tunes
+qing
+warwick
+tops
+119
+melanie
+giles
+##rous
+wandered
+##inal
+annexed
+nov
+30th
+unnamed
+##ished
+organizational
+airplane
+normandy
+stoke
+whistle
+blessing
+violations
+chased
+holders
+shotgun
+##ctic
+outlet
+reactor
+##vik
+tires
+tearing
+shores
+fortified
+mascot
+constituencies
+nc
+columnist
+productive
+tibet
+##rta
+lineage
+hooked
+oct
+tapes
+judging
+cody
+##gger
+hansen
+kashmir
+triggered
+##eva
+solved
+cliffs
+##tree
+resisted
+anatomy
+protesters
+transparent
+implied
+##iga
+injection
+mattress
+excluding
+##mbo
+defenses
+helpless
+devotion
+##elli
+growl
+liberals
+weber
+phenomena
+atoms
+plug
+##iff
+mortality
+apprentice
+howe
+convincing
+aaa
+swimmer
+barber
+leone
+promptly
+sodium
+def
+nowadays
+arise
+##oning
+gloucester
+corrected
+dignity
+norm
+erie
+##ders
+elders
+evacuated
+sylvia
+compression
+##yar
+hartford
+pose
+backpack
+reasoning
+accepts
+24th
+wipe
+millimetres
+marcel
+##oda
+dodgers
+albion
+1790
+overwhelmed
+aerospace
+oaks
+1795
+showcase
+acknowledge
+recovering
+nolan
+ashe
+hurts
+geology
+fashioned
+disappearance
+farewell
+swollen
+shrug
+marquis
+wimbledon
+124
+rue
+1792
+commemorate
+reduces
+experiencing
+inevitable
+calcutta
+intel
+##court
+murderer
+sticking
+fisheries
+imagery
+bloom
+280
+brake
+##inus
+gustav
+hesitation
+memorable
+po
+viral
+beans
+accidents
+tunisia
+antenna
+spilled
+consort
+treatments
+aye
+perimeter
+##gard
+donation
+hostage
+migrated
+banker
+addiction
+apex
+lil
+trout
+##ously
+conscience
+##nova
+rams
+sands
+genome
+passionate
+troubles
+##lets
+##set
+amid
+##ibility
+##ret
+higgins
+exceed
+vikings
+##vie
+payne
+##zan
+muscular
+##ste
+defendant
+sucking
+##wal
+ibrahim
+fuselage
+claudia
+vfl
+europeans
+snails
+interval
+##garh
+preparatory
+statewide
+tasked
+lacrosse
+viktor
+##lation
+angola
+##hra
+flint
+implications
+employs
+teens
+patrons
+stall
+weekends
+barriers
+scrambled
+nucleus
+tehran
+jenna
+parsons
+lifelong
+robots
+displacement
+5000
+##bles
+precipitation
+##gt
+knuckles
+clutched
+1802
+marrying
+ecology
+marx
+accusations
+declare
+scars
+kolkata
+mat
+meadows
+bermuda
+skeleton
+finalists
+vintage
+crawl
+coordinate
+affects
+subjected
+orchestral
+mistaken
+##tc
+mirrors
+dipped
+relied
+260
+arches
+candle
+##nick
+incorporating
+wildly
+fond
+basilica
+owl
+fringe
+rituals
+whispering
+stirred
+feud
+tertiary
+slick
+goat
+honorable
+whereby
+skip
+ricardo
+stripes
+parachute
+adjoining
+submerged
+synthesizer
+##gren
+intend
+positively
+ninety
+phi
+beaver
+partition
+fellows
+alexis
+prohibition
+carlisle
+bizarre
+fraternity
+##bre
+doubts
+icy
+cbc
+aquatic
+sneak
+sonny
+combines
+airports
+crude
+supervised
+spatial
+merge
+alfonso
+##bic
+corrupt
+scan
+undergo
+##ams
+disabilities
+colombian
+comparing
+dolphins
+perkins
+##lish
+reprinted
+unanimous
+bounced
+hairs
+underworld
+midwest
+semester
+bucket
+paperback
+miniseries
+coventry
+demise
+##leigh
+demonstrations
+sensor
+rotating
+yan
+##hler
+arrange
+soils
+##idge
+hyderabad
+labs
+##dr
+brakes
+grandchildren
+##nde
+negotiated
+rover
+ferrari
+continuation
+directorate
+augusta
+stevenson
+counterpart
+gore
+##rda
+nursery
+rican
+ave
+collectively
+broadly
+pastoral
+repertoire
+asserted
+discovering
+nordic
+styled
+fiba
+cunningham
+harley
+middlesex
+survives
+tumor
+tempo
+zack
+aiming
+lok
+urgent
+##rade
+##nto
+devils
+##ement
+contractor
+turin
+##wl
+##ool
+bliss
+repaired
+simmons
+moan
+astronomical
+cr
+negotiate
+lyric
+1890s
+lara
+bred
+clad
+angus
+pbs
+##ience
+engineered
+posed
+##lk
+hernandez
+possessions
+elbows
+psychiatric
+strokes
+confluence
+electorate
+lifts
+campuses
+lava
+alps
+##ep
+##ution
+##date
+physicist
+woody
+##page
+##ographic
+##itis
+juliet
+reformation
+sparhawk
+320
+complement
+suppressed
+jewel
+##½
+floated
+##kas
+continuity
+sadly
+##ische
+inability
+melting
+scanning
+paula
+flour
+judaism
+safer
+vague
+##lm
+solving
+curb
+##stown
+financially
+gable
+bees
+expired
+miserable
+cassidy
+dominion
+1789
+cupped
+145
+robbery
+facto
+amos
+warden
+resume
+tallest
+marvin
+ing
+pounded
+usd
+declaring
+gasoline
+##aux
+darkened
+270
+650
+sophomore
+##mere
+erection
+gossip
+televised
+risen
+dial
+##eu
+pillars
+##link
+passages
+profound
+##tina
+arabian
+ashton
+silicon
+nail
+##ead
+##lated
+##wer
+##hardt
+fleming
+firearms
+ducked
+circuits
+blows
+waterloo
+titans
+##lina
+atom
+fireplace
+cheshire
+financed
+activation
+algorithms
+##zzi
+constituent
+catcher
+cherokee
+partnerships
+sexuality
+platoon
+tragic
+vivian
+guarded
+whiskey
+meditation
+poetic
+##late
+##nga
+##ake
+porto
+listeners
+dominance
+kendra
+mona
+chandler
+factions
+22nd
+salisbury
+attitudes
+derivative
+##ido
+##haus
+intake
+paced
+javier
+illustrator
+barrels
+bias
+cockpit
+burnett
+dreamed
+ensuing
+##anda
+receptors
+someday
+hawkins
+mattered
+##lal
+slavic
+1799
+jesuit
+cameroon
+wasted
+tai
+wax
+lowering
+victorious
+freaking
+outright
+hancock
+librarian
+sensing
+bald
+calcium
+myers
+tablet
+announcing
+barack
+shipyard
+pharmaceutical
+##uan
+greenwich
+flush
+medley
+patches
+wolfgang
+pt
+speeches
+acquiring
+exams
+nikolai
+##gg
+hayden
+kannada
+##type
+reilly
+##pt
+waitress
+abdomen
+devastated
+capped
+pseudonym
+pharmacy
+fulfill
+paraguay
+1796
+clicked
+##trom
+archipelago
+syndicated
+##hman
+lumber
+orgasm
+rejection
+clifford
+lorraine
+advent
+mafia
+rodney
+brock
+##ght
+##used
+##elia
+cassette
+chamberlain
+despair
+mongolia
+sensors
+developmental
+upstream
+##eg
+##alis
+spanning
+165
+trombone
+basque
+seeded
+interred
+renewable
+rhys
+leapt
+revision
+molecule
+##ages
+chord
+vicious
+nord
+shivered
+23rd
+arlington
+debts
+corpus
+sunrise
+bays
+blackburn
+centimetres
+##uded
+shuddered
+gm
+strangely
+gripping
+cartoons
+isabelle
+orbital
+##ppa
+seals
+proving
+##lton
+refusal
+strengthened
+bust
+assisting
+baghdad
+batsman
+portrayal
+mara
+pushes
+spears
+og
+##cock
+reside
+nathaniel
+brennan
+1776
+confirmation
+caucus
+##worthy
+markings
+yemen
+nobles
+ku
+lazy
+viewer
+catalan
+encompasses
+sawyer
+##fall
+sparked
+substances
+patents
+braves
+arranger
+evacuation
+sergio
+persuade
+dover
+tolerance
+penguin
+cum
+jockey
+insufficient
+townships
+occupying
+declining
+plural
+processed
+projection
+puppet
+flanders
+introduces
+liability
+##yon
+gymnastics
+antwerp
+taipei
+hobart
+candles
+jeep
+wes
+observers
+126
+chaplain
+bundle
+glorious
+##hine
+hazel
+flung
+sol
+excavations
+dumped
+stares
+sh
+bangalore
+triangular
+icelandic
+intervals
+expressing
+turbine
+##vers
+songwriting
+crafts
+##igo
+jasmine
+ditch
+rite
+##ways
+entertaining
+comply
+sorrow
+wrestlers
+basel
+emirates
+marian
+rivera
+helpful
+##some
+caution
+downward
+networking
+##atory
+##tered
+darted
+genocide
+emergence
+replies
+specializing
+spokesman
+convenient
+unlocked
+fading
+augustine
+concentrations
+resemblance
+elijah
+investigator
+andhra
+##uda
+promotes
+bean
+##rrell
+fleeing
+wan
+simone
+announcer
+##ame
+##bby
+lydia
+weaver
+132
+residency
+modification
+##fest
+stretches
+##ast
+alternatively
+nat
+lowe
+lacks
+##ented
+pam
+tile
+concealed
+inferior
+abdullah
+residences
+tissues
+vengeance
+##ided
+moisture
+peculiar
+groove
+zip
+bologna
+jennings
+ninja
+oversaw
+zombies
+pumping
+batch
+livingston
+emerald
+installations
+1797
+peel
+nitrogen
+rama
+##fying
+##star
+schooling
+strands
+responding
+werner
+##ost
+lime
+casa
+accurately
+targeting
+##rod
+underway
+##uru
+hemisphere
+lester
+##yard
+occupies
+2d
+griffith
+angrily
+reorganized
+##owing
+courtney
+deposited
+##dd
+##30
+estadio
+##ifies
+dunn
+exiled
+##ying
+checks
+##combe
+##о
+##fly
+successes
+unexpectedly
+blu
+assessed
+##flower
+##ه
+observing
+sacked
+spiders
+kn
+##tail
+mu
+nodes
+prosperity
+audrey
+divisional
+155
+broncos
+tangled
+adjust
+feeds
+erosion
+paolo
+surf
+directory
+snatched
+humid
+admiralty
+screwed
+gt
+reddish
+##nese
+modules
+trench
+lamps
+bind
+leah
+bucks
+competes
+##nz
+##form
+transcription
+##uc
+isles
+violently
+clutching
+pga
+cyclist
+inflation
+flats
+ragged
+unnecessary
+##hian
+stubborn
+coordinated
+harriet
+baba
+disqualified
+330
+insect
+wolfe
+##fies
+reinforcements
+rocked
+duel
+winked
+embraced
+bricks
+##raj
+hiatus
+defeats
+pending
+brightly
+jealousy
+##xton
+##hm
+##uki
+lena
+gdp
+colorful
+##dley
+stein
+kidney
+##shu
+underwear
+wanderers
+##haw
+##icus
+guardians
+m³
+roared
+habits
+##wise
+permits
+gp
+uranium
+punished
+disguise
+bundesliga
+elise
+dundee
+erotic
+partisan
+pi
+collectors
+float
+individually
+rendering
+behavioral
+bucharest
+ser
+hare
+valerie
+corporal
+nutrition
+proportional
+##isa
+immense
+##kis
+pavement
+##zie
+##eld
+sutherland
+crouched
+1775
+##lp
+suzuki
+trades
+endurance
+operas
+crosby
+prayed
+priory
+rory
+socially
+##urn
+gujarat
+##pu
+walton
+cube
+pasha
+privilege
+lennon
+floods
+thorne
+waterfall
+nipple
+scouting
+approve
+##lov
+minorities
+voter
+dwight
+extensions
+assure
+ballroom
+slap
+dripping
+privileges
+rejoined
+confessed
+demonstrating
+patriotic
+yell
+investor
+##uth
+pagan
+slumped
+squares
+##cle
+##kins
+confront
+bert
+embarrassment
+##aid
+aston
+urging
+sweater
+starr
+yuri
+brains
+williamson
+commuter
+mortar
+structured
+selfish
+exports
+##jon
+cds
+##him
+unfinished
+##rre
+mortgage
+destinations
+##nagar
+canoe
+solitary
+buchanan
+delays
+magistrate
+fk
+##pling
+motivation
+##lier
+##vier
+recruiting
+assess
+##mouth
+malik
+antique
+1791
+pius
+rahman
+reich
+tub
+zhou
+smashed
+airs
+galway
+xii
+conditioning
+honduras
+discharged
+dexter
+##pf
+lionel
+129
+debates
+lemon
+tiffany
+volunteered
+dom
+dioxide
+procession
+devi
+sic
+tremendous
+advertisements
+colts
+transferring
+verdict
+hanover
+decommissioned
+utter
+relate
+pac
+racism
+##top
+beacon
+limp
+similarity
+terra
+occurrence
+ant
+##how
+becky
+capt
+updates
+armament
+richie
+pal
+##graph
+halloween
+mayo
+##ssen
+##bone
+cara
+serena
+fcc
+dolls
+obligations
+##dling
+violated
+lafayette
+jakarta
+exploitation
+##ime
+infamous
+iconic
+##lah
+##park
+kitty
+moody
+reginald
+dread
+spill
+crystals
+olivier
+modeled
+bluff
+equilibrium
+separating
+notices
+ordnance
+extinction
+onset
+cosmic
+attachment
+sammy
+expose
+privy
+anchored
+##bil
+abbott
+admits
+bending
+baritone
+emmanuel
+policeman
+vaughan
+winged
+climax
+dresses
+denny
+polytechnic
+mohamed
+burmese
+authentic
+nikki
+genetics
+grandparents
+homestead
+gaza
+postponed
+metacritic
+una
+##sby
+##bat
+unstable
+dissertation
+##rial
+##cian
+curls
+obscure
+uncovered
+bronx
+praying
+disappearing
+##hoe
+prehistoric
+coke
+turret
+mutations
+nonprofit
+pits
+monaco
+##ي
+##usion
+prominently
+dispatched
+podium
+##mir
+uci
+##uation
+133
+fortifications
+birthplace
+kendall
+##lby
+##oll
+preacher
+rack
+goodman
+##rman
+persistent
+##ott
+countless
+jaime
+recorder
+lexington
+persecution
+jumps
+renewal
+wagons
+##11
+crushing
+##holder
+decorations
+##lake
+abundance
+wrath
+laundry
+£1
+garde
+##rp
+jeanne
+beetles
+peasant
+##sl
+splitting
+caste
+sergei
+##rer
+##ema
+scripts
+##ively
+rub
+satellites
+##vor
+inscribed
+verlag
+scrapped
+gale
+packages
+chick
+potato
+slogan
+kathleen
+arabs
+##culture
+counterparts
+reminiscent
+choral
+##tead
+rand
+retains
+bushes
+dane
+accomplish
+courtesy
+closes
+##oth
+slaughter
+hague
+krakow
+lawson
+tailed
+elias
+ginger
+##ttes
+canopy
+betrayal
+rebuilding
+turf
+##hof
+frowning
+allegiance
+brigades
+kicks
+rebuild
+polls
+alias
+nationalism
+td
+rowan
+audition
+bowie
+fortunately
+recognizes
+harp
+dillon
+horrified
+##oro
+renault
+##tics
+ropes
+##α
+presumed
+rewarded
+infrared
+wiping
+accelerated
+illustration
+##rid
+presses
+practitioners
+badminton
+##iard
+detained
+##tera
+recognizing
+relates
+misery
+##sies
+##tly
+reproduction
+piercing
+potatoes
+thornton
+esther
+manners
+hbo
+##aan
+ours
+bullshit
+ernie
+perennial
+sensitivity
+illuminated
+rupert
+##jin
+##iss
+##ear
+rfc
+nassau
+##dock
+staggered
+socialism
+##haven
+appointments
+nonsense
+prestige
+sharma
+haul
+##tical
+solidarity
+gps
+##ook
+##rata
+igor
+pedestrian
+##uit
+baxter
+tenants
+wires
+medication
+unlimited
+guiding
+impacts
+diabetes
+##rama
+sasha
+pas
+clive
+extraction
+131
+continually
+constraints
+##bilities
+sonata
+hunted
+sixteenth
+chu
+planting
+quote
+mayer
+pretended
+abs
+spat
+##hua
+ceramic
+##cci
+curtains
+pigs
+pitching
+##dad
+latvian
+sore
+dayton
+##sted
+##qi
+patrols
+slice
+playground
+##nted
+shone
+stool
+apparatus
+inadequate
+mates
+treason
+##ija
+desires
+##liga
+##croft
+somalia
+laurent
+mir
+leonardo
+oracle
+grape
+obliged
+chevrolet
+thirteenth
+stunning
+enthusiastic
+##ede
+accounted
+concludes
+currents
+basil
+##kovic
+drought
+##rica
+mai
+##aire
+shove
+posting
+##shed
+pilgrimage
+humorous
+packing
+fry
+pencil
+wines
+smells
+144
+marilyn
+aching
+newest
+clung
+bon
+neighbours
+sanctioned
+##pie
+mug
+##stock
+drowning
+##mma
+hydraulic
+##vil
+hiring
+reminder
+lilly
+investigators
+##ncies
+sour
+##eous
+compulsory
+packet
+##rion
+##graphic
+##elle
+cannes
+##inate
+depressed
+##rit
+heroic
+importantly
+theresa
+##tled
+conway
+saturn
+marginal
+rae
+##xia
+corresponds
+royce
+pact
+jasper
+explosives
+packaging
+aluminium
+##ttered
+denotes
+rhythmic
+spans
+assignments
+hereditary
+outlined
+originating
+sundays
+lad
+reissued
+greeting
+beatrice
+##dic
+pillar
+marcos
+plots
+handbook
+alcoholic
+judiciary
+avant
+slides
+extract
+masculine
+blur
+##eum
+##force
+homage
+trembled
+owens
+hymn
+trey
+omega
+signaling
+socks
+accumulated
+reacted
+attic
+theo
+lining
+angie
+distraction
+primera
+talbot
+##key
+1200
+ti
+creativity
+billed
+##hey
+deacon
+eduardo
+identifies
+proposition
+dizzy
+gunner
+hogan
+##yam
+##pping
+##hol
+ja
+##chan
+jensen
+reconstructed
+##berger
+clearance
+darius
+##nier
+abe
+harlem
+plea
+dei
+circled
+emotionally
+notation
+fascist
+neville
+exceeded
+upwards
+viable
+ducks
+##fo
+workforce
+racer
+limiting
+shri
+##lson
+possesses
+1600
+kerr
+moths
+devastating
+laden
+disturbing
+locking
+##cture
+gal
+fearing
+accreditation
+flavor
+aide
+1870s
+mountainous
+##baum
+melt
+##ures
+motel
+texture
+servers
+soda
+##mb
+herd
+##nium
+erect
+puzzled
+hum
+peggy
+examinations
+gould
+testified
+geoff
+ren
+devised
+sacks
+##law
+denial
+posters
+grunted
+cesar
+tutor
+ec
+gerry
+offerings
+byrne
+falcons
+combinations
+ct
+incoming
+pardon
+rocking
+26th
+avengers
+flared
+mankind
+seller
+uttar
+loch
+nadia
+stroking
+exposing
+##hd
+fertile
+ancestral
+instituted
+##has
+noises
+prophecy
+taxation
+eminent
+vivid
+pol
+##bol
+dart
+indirect
+multimedia
+notebook
+upside
+displaying
+adrenaline
+referenced
+geometric
+##iving
+progression
+##ddy
+blunt
+announce
+##far
+implementing
+##lav
+aggression
+liaison
+cooler
+cares
+headache
+plantations
+gorge
+dots
+impulse
+thickness
+ashamed
+averaging
+kathy
+obligation
+precursor
+137
+fowler
+symmetry
+thee
+225
+hears
+##rai
+undergoing
+ads
+butcher
+bowler
+##lip
+cigarettes
+subscription
+goodness
+##ically
+browne
+##hos
+##tech
+kyoto
+donor
+##erty
+damaging
+friction
+drifting
+expeditions
+hardened
+prostitution
+152
+fauna
+blankets
+claw
+tossing
+snarled
+butterflies
+recruits
+investigative
+coated
+healed
+138
+communal
+hai
+xiii
+academics
+boone
+psychologist
+restless
+lahore
+stephens
+mba
+brendan
+foreigners
+printer
+##pc
+ached
+explode
+27th
+deed
+scratched
+dared
+##pole
+cardiac
+1780
+okinawa
+proto
+commando
+compelled
+oddly
+electrons
+##base
+replica
+thanksgiving
+##rist
+sheila
+deliberate
+stafford
+tidal
+representations
+hercules
+ou
+##path
+##iated
+kidnapping
+lenses
+##tling
+deficit
+samoa
+mouths
+consuming
+computational
+maze
+granting
+smirk
+razor
+fixture
+ideals
+inviting
+aiden
+nominal
+##vs
+issuing
+julio
+pitt
+ramsey
+docks
+##oss
+exhaust
+##owed
+bavarian
+draped
+anterior
+mating
+ethiopian
+explores
+noticing
+##nton
+discarded
+convenience
+hoffman
+endowment
+beasts
+cartridge
+mormon
+paternal
+probe
+sleeves
+interfere
+lump
+deadline
+##rail
+jenks
+bulldogs
+scrap
+alternating
+justified
+reproductive
+nam
+seize
+descending
+secretariat
+kirby
+coupe
+grouped
+smash
+panther
+sedan
+tapping
+##18
+lola
+cheer
+germanic
+unfortunate
+##eter
+unrelated
+##fan
+subordinate
+##sdale
+suzanne
+advertisement
+##ility
+horsepower
+##lda
+cautiously
+discourse
+luigi
+##mans
+##fields
+noun
+prevalent
+mao
+schneider
+everett
+surround
+governorate
+kira
+##avia
+westward
+##take
+misty
+rails
+sustainability
+134
+unused
+##rating
+packs
+toast
+unwilling
+regulate
+thy
+suffrage
+nile
+awe
+assam
+definitions
+travelers
+affordable
+##rb
+conferred
+sells
+undefeated
+beneficial
+torso
+basal
+repeating
+remixes
+##pass
+bahrain
+cables
+fang
+##itated
+excavated
+numbering
+statutory
+##rey
+deluxe
+##lian
+forested
+ramirez
+derbyshire
+zeus
+slamming
+transfers
+astronomer
+banana
+lottery
+berg
+histories
+bamboo
+##uchi
+resurrection
+posterior
+bowls
+vaguely
+##thi
+thou
+preserving
+tensed
+offence
+##inas
+meyrick
+callum
+ridden
+watt
+langdon
+tying
+lowland
+snorted
+daring
+truman
+##hale
+##girl
+aura
+overly
+filing
+weighing
+goa
+infections
+philanthropist
+saunders
+eponymous
+##owski
+latitude
+perspectives
+reviewing
+mets
+commandant
+radial
+##kha
+flashlight
+reliability
+koch
+vowels
+amazed
+ada
+elaine
+supper
+##rth
+##encies
+predator
+debated
+soviets
+cola
+##boards
+##nah
+compartment
+crooked
+arbitrary
+fourteenth
+##ctive
+havana
+majors
+steelers
+clips
+profitable
+ambush
+exited
+packers
+##tile
+nude
+cracks
+fungi
+##е
+limb
+trousers
+josie
+shelby
+tens
+frederic
+##ος
+definite
+smoothly
+constellation
+insult
+baton
+discs
+lingering
+##nco
+conclusions
+lent
+staging
+becker
+grandpa
+shaky
+##tron
+einstein
+obstacles
+sk
+adverse
+elle
+economically
+##moto
+mccartney
+thor
+dismissal
+motions
+readings
+nostrils
+treatise
+##pace
+squeezing
+evidently
+prolonged
+1783
+venezuelan
+je
+marguerite
+beirut
+takeover
+shareholders
+##vent
+denise
+digit
+airplay
+norse
+##bbling
+imaginary
+pills
+hubert
+blaze
+vacated
+eliminating
+##ello
+vine
+mansfield
+##tty
+retrospective
+barrow
+borne
+clutch
+bail
+forensic
+weaving
+##nett
+##witz
+desktop
+citadel
+promotions
+worrying
+dorset
+ieee
+subdivided
+##iating
+manned
+expeditionary
+pickup
+synod
+chuckle
+185
+barney
+##rz
+##ffin
+functionality
+karachi
+litigation
+meanings
+uc
+lick
+turbo
+anders
+##ffed
+execute
+curl
+oppose
+ankles
+typhoon
+##د
+##ache
+##asia
+linguistics
+compassion
+pressures
+grazing
+perfection
+##iting
+immunity
+monopoly
+muddy
+backgrounds
+136
+namibia
+francesca
+monitors
+attracting
+stunt
+tuition
+##ии
+vegetable
+##mates
+##quent
+mgm
+jen
+complexes
+forts
+##ond
+cellar
+bites
+seventeenth
+royals
+flemish
+failures
+mast
+charities
+##cular
+peruvian
+capitals
+macmillan
+ipswich
+outward
+frigate
+postgraduate
+folds
+employing
+##ouse
+concurrently
+fiery
+##tai
+contingent
+nightmares
+monumental
+nicaragua
+##kowski
+lizard
+mal
+fielding
+gig
+reject
+##pad
+harding
+##ipe
+coastline
+##cin
+##nos
+beethoven
+humphrey
+innovations
+##tam
+##nge
+norris
+doris
+solicitor
+huang
+obey
+141
+##lc
+niagara
+##tton
+shelves
+aug
+bourbon
+curry
+nightclub
+specifications
+hilton
+##ndo
+centennial
+dispersed
+worm
+neglected
+briggs
+sm
+font
+kuala
+uneasy
+plc
+##nstein
+##bound
+##aking
+##burgh
+awaiting
+pronunciation
+##bbed
+##quest
+eh
+optimal
+zhu
+raped
+greens
+presided
+brenda
+worries
+##life
+venetian
+marxist
+turnout
+##lius
+refined
+braced
+sins
+grasped
+sunderland
+nickel
+speculated
+lowell
+cyrillic
+communism
+fundraising
+resembling
+colonists
+mutant
+freddie
+usc
+##mos
+gratitude
+##run
+mural
+##lous
+chemist
+wi
+reminds
+28th
+steals
+tess
+pietro
+##ingen
+promoter
+ri
+microphone
+honoured
+rai
+sant
+##qui
+feather
+##nson
+burlington
+kurdish
+terrorists
+deborah
+sickness
+##wed
+##eet
+hazard
+irritated
+desperation
+veil
+clarity
+##rik
+jewels
+xv
+##gged
+##ows
+##cup
+berkshire
+unfair
+mysteries
+orchid
+winced
+exhaustion
+renovations
+stranded
+obe
+infinity
+##nies
+adapt
+redevelopment
+thanked
+registry
+olga
+domingo
+noir
+tudor
+ole
+##atus
+commenting
+behaviors
+##ais
+crisp
+pauline
+probable
+stirling
+wigan
+##bian
+paralympics
+panting
+surpassed
+##rew
+luca
+barred
+pony
+famed
+##sters
+cassandra
+waiter
+carolyn
+exported
+##orted
+andres
+destructive
+deeds
+jonah
+castles
+vacancy
+suv
+##glass
+1788
+orchard
+yep
+famine
+belarusian
+sprang
+##forth
+skinny
+##mis
+administrators
+rotterdam
+zambia
+zhao
+boiler
+discoveries
+##ride
+##physics
+lucius
+disappointing
+outreach
+spoon
+##frame
+qualifications
+unanimously
+enjoys
+regency
+##iidae
+stade
+realism
+veterinary
+rodgers
+dump
+alain
+chestnut
+castile
+censorship
+rumble
+gibbs
+##itor
+communion
+reggae
+inactivated
+logs
+loads
+##houses
+homosexual
+##iano
+ale
+informs
+##cas
+phrases
+plaster
+linebacker
+ambrose
+kaiser
+fascinated
+850
+limerick
+recruitment
+forge
+mastered
+##nding
+leinster
+rooted
+threaten
+##strom
+borneo
+##hes
+suggestions
+scholarships
+propeller
+documentaries
+patronage
+coats
+constructing
+invest
+neurons
+comet
+entirety
+shouts
+identities
+annoying
+unchanged
+wary
+##antly
+##ogy
+neat
+oversight
+##kos
+phillies
+replay
+constance
+##kka
+incarnation
+humble
+skies
+minus
+##acy
+smithsonian
+##chel
+guerrilla
+jar
+cadets
+##plate
+surplus
+audit
+##aru
+cracking
+joanna
+louisa
+pacing
+##lights
+intentionally
+##iri
+diner
+nwa
+imprint
+australians
+tong
+unprecedented
+bunker
+naive
+specialists
+ark
+nichols
+railing
+leaked
+pedal
+##uka
+shrub
+longing
+roofs
+v8
+captains
+neural
+tuned
+##ntal
+##jet
+emission
+medina
+frantic
+codex
+definitive
+sid
+abolition
+intensified
+stocks
+enrique
+sustain
+genoa
+oxide
+##written
+clues
+cha
+##gers
+tributaries
+fragment
+venom
+##rity
+##ente
+##sca
+muffled
+vain
+sire
+laos
+##ingly
+##hana
+hastily
+snapping
+surfaced
+sentiment
+motive
+##oft
+contests
+approximate
+mesa
+luckily
+dinosaur
+exchanges
+propelled
+accord
+bourne
+relieve
+tow
+masks
+offended
+##ues
+cynthia
+##mmer
+rains
+bartender
+zinc
+reviewers
+lois
+##sai
+legged
+arrogant
+rafe
+rosie
+comprise
+handicap
+blockade
+inlet
+lagoon
+copied
+drilling
+shelley
+petals
+##inian
+mandarin
+obsolete
+##inated
+onward
+arguably
+productivity
+cindy
+praising
+seldom
+busch
+discusses
+raleigh
+shortage
+ranged
+stanton
+encouragement
+firstly
+conceded
+overs
+temporal
+##uke
+cbe
+##bos
+woo
+certainty
+pumps
+##pton
+stalked
+##uli
+lizzie
+periodic
+thieves
+weaker
+##night
+gases
+shoving
+chooses
+wc
+##chemical
+prompting
+weights
+##kill
+robust
+flanked
+sticky
+hu
+tuberculosis
+##eb
+##eal
+christchurch
+resembled
+wallet
+reese
+inappropriate
+pictured
+distract
+fixing
+fiddle
+giggled
+burger
+heirs
+hairy
+mechanic
+torque
+apache
+obsessed
+chiefly
+cheng
+logging
+##tag
+extracted
+meaningful
+numb
+##vsky
+gloucestershire
+reminding
+##bay
+unite
+##lit
+breeds
+diminished
+clown
+glove
+1860s
+##ن
+##ug
+archibald
+focal
+freelance
+sliced
+depiction
+##yk
+organism
+switches
+sights
+stray
+crawling
+##ril
+lever
+leningrad
+interpretations
+loops
+anytime
+reel
+alicia
+delighted
+##ech
+inhaled
+xiv
+suitcase
+bernie
+vega
+licenses
+northampton
+exclusion
+induction
+monasteries
+racecourse
+homosexuality
+##right
+##sfield
+##rky
+dimitri
+michele
+alternatives
+ions
+commentators
+genuinely
+objected
+pork
+hospitality
+fencing
+stephan
+warships
+peripheral
+wit
+drunken
+wrinkled
+quentin
+spends
+departing
+chung
+numerical
+spokesperson
+##zone
+johannesburg
+caliber
+killers
+##udge
+assumes
+neatly
+demographic
+abigail
+bloc
+##vel
+mounting
+##lain
+bentley
+slightest
+xu
+recipients
+##jk
+merlin
+##writer
+seniors
+prisons
+blinking
+hindwings
+flickered
+kappa
+##hel
+80s
+strengthening
+appealing
+brewing
+gypsy
+mali
+lashes
+hulk
+unpleasant
+harassment
+bio
+treaties
+predict
+instrumentation
+pulp
+troupe
+boiling
+mantle
+##ffe
+ins
+##vn
+dividing
+handles
+verbs
+##onal
+coconut
+senegal
+340
+thorough
+gum
+momentarily
+##sto
+cocaine
+panicked
+destined
+##turing
+teatro
+denying
+weary
+captained
+mans
+##hawks
+##code
+wakefield
+bollywood
+thankfully
+##16
+cyril
+##wu
+amendments
+##bahn
+consultation
+stud
+reflections
+kindness
+1787
+internally
+##ovo
+tex
+mosaic
+distribute
+paddy
+seeming
+143
+##hic
+piers
+##15
+##mura
+##verse
+popularly
+winger
+kang
+sentinel
+mccoy
+##anza
+covenant
+##bag
+verge
+fireworks
+suppress
+thrilled
+dominate
+##jar
+swansea
+##60
+142
+reconciliation
+##ndi
+stiffened
+cue
+dorian
+##uf
+damascus
+amor
+ida
+foremost
+##aga
+porsche
+unseen
+dir
+##had
+##azi
+stony
+lexi
+melodies
+##nko
+angular
+integer
+podcast
+ants
+inherent
+jaws
+justify
+persona
+##olved
+josephine
+##nr
+##ressed
+customary
+flashes
+gala
+cyrus
+glaring
+backyard
+ariel
+physiology
+greenland
+html
+stir
+avon
+atletico
+finch
+methodology
+ked
+##lent
+mas
+catholicism
+townsend
+branding
+quincy
+fits
+containers
+1777
+ashore
+aragon
+##19
+forearm
+poisoning
+##sd
+adopting
+conquer
+grinding
+amnesty
+keller
+finances
+evaluate
+forged
+lankan
+instincts
+##uto
+guam
+bosnian
+photographed
+workplace
+desirable
+protector
+##dog
+allocation
+intently
+encourages
+willy
+##sten
+bodyguard
+electro
+brighter
+##ν
+bihar
+##chev
+lasts
+opener
+amphibious
+sal
+verde
+arte
+##cope
+captivity
+vocabulary
+yields
+##tted
+agreeing
+desmond
+pioneered
+##chus
+strap
+campaigned
+railroads
+##ович
+emblem
+##dre
+stormed
+501
+##ulous
+marijuana
+northumberland
+##gn
+##nath
+bowen
+landmarks
+beaumont
+##qua
+danube
+##bler
+attorneys
+th
+ge
+flyers
+critique
+villains
+cass
+mutation
+acc
+##0s
+colombo
+mckay
+motif
+sampling
+concluding
+syndicate
+##rell
+neon
+stables
+ds
+warnings
+clint
+mourning
+wilkinson
+##tated
+merrill
+leopard
+evenings
+exhaled
+emil
+sonia
+ezra
+discrete
+stove
+farrell
+fifteenth
+prescribed
+superhero
+##rier
+worms
+helm
+wren
+##duction
+##hc
+expo
+##rator
+hq
+unfamiliar
+antony
+prevents
+acceleration
+fiercely
+mari
+painfully
+calculations
+cheaper
+ign
+clifton
+irvine
+davenport
+mozambique
+##np
+pierced
+##evich
+wonders
+##wig
+##cate
+##iling
+crusade
+ware
+##uel
+enzymes
+reasonably
+mls
+##coe
+mater
+ambition
+bunny
+eliot
+kernel
+##fin
+asphalt
+headmaster
+torah
+aden
+lush
+pins
+waived
+##care
+##yas
+joao
+substrate
+enforce
+##grad
+##ules
+alvarez
+selections
+epidemic
+tempted
+##bit
+bremen
+translates
+ensured
+waterfront
+29th
+forrest
+manny
+malone
+kramer
+reigning
+cookies
+simpler
+absorption
+205
+engraved
+##ffy
+evaluated
+1778
+haze
+146
+comforting
+crossover
+##abe
+thorn
+##rift
+##imo
+##pop
+suppression
+fatigue
+cutter
+##tr
+201
+wurttemberg
+##orf
+enforced
+hovering
+proprietary
+gb
+samurai
+syllable
+ascent
+lacey
+tick
+lars
+tractor
+merchandise
+rep
+bouncing
+defendants
+##yre
+huntington
+##ground
+##oko
+standardized
+##hor
+##hima
+assassinated
+nu
+predecessors
+rainy
+liar
+assurance
+lyrical
+##uga
+secondly
+flattened
+ios
+parameter
+undercover
+##mity
+bordeaux
+punish
+ridges
+markers
+exodus
+inactive
+hesitate
+debbie
+nyc
+pledge
+savoy
+nagar
+offset
+organist
+##tium
+hesse
+marin
+converting
+##iver
+diagram
+propulsion
+pu
+validity
+reverted
+supportive
+##dc
+ministries
+clans
+responds
+proclamation
+##inae
+##ø
+##rea
+ein
+pleading
+patriot
+sf
+birch
+islanders
+strauss
+hates
+##dh
+brandenburg
+concession
+rd
+##ob
+1900s
+killings
+textbook
+antiquity
+cinematography
+wharf
+embarrassing
+setup
+creed
+farmland
+inequality
+centred
+signatures
+fallon
+370
+##ingham
+##uts
+ceylon
+gazing
+directive
+laurie
+##tern
+globally
+##uated
+##dent
+allah
+excavation
+threads
+##cross
+148
+frantically
+icc
+utilize
+determines
+respiratory
+thoughtful
+receptions
+##dicate
+merging
+chandra
+seine
+147
+builders
+builds
+diagnostic
+dev
+visibility
+goddamn
+analyses
+dhaka
+cho
+proves
+chancel
+concurrent
+curiously
+canadians
+pumped
+restoring
+1850s
+turtles
+jaguar
+sinister
+spinal
+traction
+declan
+vows
+1784
+glowed
+capitalism
+swirling
+install
+universidad
+##lder
+##oat
+soloist
+##genic
+##oor
+coincidence
+beginnings
+nissan
+dip
+resorts
+caucasus
+combustion
+infectious
+##eno
+pigeon
+serpent
+##itating
+conclude
+masked
+salad
+jew
+##gr
+surreal
+toni
+##wc
+harmonica
+151
+##gins
+##etic
+##coat
+fishermen
+intending
+bravery
+##wave
+klaus
+titan
+wembley
+taiwanese
+ransom
+40th
+incorrect
+hussein
+eyelids
+jp
+cooke
+dramas
+utilities
+##etta
+##print
+eisenhower
+principally
+granada
+lana
+##rak
+openings
+concord
+##bl
+bethany
+connie
+morality
+sega
+##mons
+##nard
+earnings
+##kara
+##cine
+wii
+communes
+##rel
+coma
+composing
+softened
+severed
+grapes
+##17
+nguyen
+analyzed
+warlord
+hubbard
+heavenly
+behave
+slovenian
+##hit
+##ony
+hailed
+filmmakers
+trance
+caldwell
+skye
+unrest
+coward
+likelihood
+##aging
+bern
+sci
+taliban
+honolulu
+propose
+##wang
+1700
+browser
+imagining
+cobra
+contributes
+dukes
+instinctively
+conan
+violinist
+##ores
+accessories
+gradual
+##amp
+quotes
+sioux
+##dating
+undertake
+intercepted
+sparkling
+compressed
+139
+fungus
+tombs
+haley
+imposing
+rests
+degradation
+lincolnshire
+retailers
+wetlands
+tulsa
+distributor
+dungeon
+nun
+greenhouse
+convey
+atlantis
+aft
+exits
+oman
+dresser
+lyons
+##sti
+joking
+eddy
+judgement
+omitted
+digits
+##cts
+##game
+juniors
+##rae
+cents
+stricken
+une
+##ngo
+wizards
+weir
+breton
+nan
+technician
+fibers
+liking
+royalty
+##cca
+154
+persia
+terribly
+magician
+##rable
+##unt
+vance
+cafeteria
+booker
+camille
+warmer
+##static
+consume
+cavern
+gaps
+compass
+contemporaries
+foyer
+soothing
+graveyard
+maj
+plunged
+blush
+##wear
+cascade
+demonstrates
+ordinance
+##nov
+boyle
+##lana
+rockefeller
+shaken
+banjo
+izzy
+##ense
+breathless
+vines
+##32
+##eman
+alterations
+chromosome
+dwellings
+feudal
+mole
+153
+catalonia
+relics
+tenant
+mandated
+##fm
+fridge
+hats
+honesty
+patented
+raul
+heap
+cruisers
+accusing
+enlightenment
+infants
+wherein
+chatham
+contractors
+zen
+affinity
+hc
+osborne
+piston
+156
+traps
+maturity
+##rana
+lagos
+##zal
+peering
+##nay
+attendant
+dealers
+protocols
+subset
+prospects
+biographical
+##cre
+artery
+##zers
+insignia
+nuns
+endured
+##eration
+recommend
+schwartz
+serbs
+berger
+cromwell
+crossroads
+##ctor
+enduring
+clasped
+grounded
+##bine
+marseille
+twitched
+abel
+choke
+https
+catalyst
+moldova
+italians
+##tist
+disastrous
+wee
+##oured
+##nti
+wwf
+nope
+##piration
+##asa
+expresses
+thumbs
+167
+##nza
+coca
+1781
+cheating
+##ption
+skipped
+sensory
+heidelberg
+spies
+satan
+dangers
+semifinal
+202
+bohemia
+whitish
+confusing
+shipbuilding
+relies
+surgeons
+landings
+ravi
+baku
+moor
+suffix
+alejandro
+##yana
+litre
+upheld
+##unk
+rajasthan
+##rek
+coaster
+insists
+posture
+scenarios
+etienne
+favoured
+appoint
+transgender
+elephants
+poked
+greenwood
+defences
+fulfilled
+militant
+somali
+1758
+chalk
+potent
+##ucci
+migrants
+wink
+assistants
+nos
+restriction
+activism
+niger
+##ario
+colon
+shaun
+##sat
+daphne
+##erated
+swam
+congregations
+reprise
+considerations
+magnet
+playable
+xvi
+##р
+overthrow
+tobias
+knob
+chavez
+coding
+##mers
+propped
+katrina
+orient
+newcomer
+##suke
+temperate
+##pool
+farmhouse
+interrogation
+##vd
+committing
+##vert
+forthcoming
+strawberry
+joaquin
+macau
+ponds
+shocking
+siberia
+##cellular
+chant
+contributors
+##nant
+##ologists
+sped
+absorb
+hail
+1782
+spared
+##hore
+barbados
+karate
+opus
+originates
+saul
+##xie
+evergreen
+leaped
+##rock
+correlation
+exaggerated
+weekday
+unification
+bump
+tracing
+brig
+afb
+pathways
+utilizing
+##ners
+mod
+mb
+disturbance
+kneeling
+##stad
+##guchi
+100th
+pune
+##thy
+decreasing
+168
+manipulation
+miriam
+academia
+ecosystem
+occupational
+rbi
+##lem
+rift
+##14
+rotary
+stacked
+incorporation
+awakening
+generators
+guerrero
+racist
+##omy
+cyber
+derivatives
+culminated
+allie
+annals
+panzer
+sainte
+wikipedia
+pops
+zu
+austro
+##vate
+algerian
+politely
+nicholson
+mornings
+educate
+tastes
+thrill
+dartmouth
+##gating
+db
+##jee
+regan
+differing
+concentrating
+choreography
+divinity
+##media
+pledged
+alexandre
+routing
+gregor
+madeline
+##idal
+apocalypse
+##hora
+gunfire
+culminating
+elves
+fined
+liang
+lam
+programmed
+tar
+guessing
+transparency
+gabrielle
+##gna
+cancellation
+flexibility
+##lining
+accession
+shea
+stronghold
+nets
+specializes
+##rgan
+abused
+hasan
+sgt
+ling
+exceeding
+##₄
+admiration
+supermarket
+##ark
+photographers
+specialised
+tilt
+resonance
+hmm
+perfume
+380
+sami
+threatens
+garland
+botany
+guarding
+boiled
+greet
+puppy
+russo
+supplier
+wilmington
+vibrant
+vijay
+##bius
+paralympic
+grumbled
+paige
+faa
+licking
+margins
+hurricanes
+##gong
+fest
+grenade
+ripping
+##uz
+counseling
+weigh
+##sian
+needles
+wiltshire
+edison
+costly
+##not
+fulton
+tramway
+redesigned
+staffordshire
+cache
+gasping
+watkins
+sleepy
+candidacy
+##group
+monkeys
+timeline
+throbbing
+##bid
+##sos
+berth
+uzbekistan
+vanderbilt
+bothering
+overturned
+ballots
+gem
+##iger
+sunglasses
+subscribers
+hooker
+compelling
+ang
+exceptionally
+saloon
+stab
+##rdi
+carla
+terrifying
+rom
+##vision
+coil
+##oids
+satisfying
+vendors
+31st
+mackay
+deities
+overlooked
+ambient
+bahamas
+felipe
+olympia
+whirled
+botanist
+advertised
+tugging
+##dden
+disciples
+morales
+unionist
+rites
+foley
+morse
+motives
+creepy
+##₀
+soo
+##sz
+bargain
+highness
+frightening
+turnpike
+tory
+reorganization
+##cer
+depict
+biographer
+##walk
+unopposed
+manifesto
+##gles
+institut
+emile
+accidental
+kapoor
+##dam
+kilkenny
+cortex
+lively
+##13
+romanesque
+jain
+shan
+cannons
+##ood
+##ske
+petrol
+echoing
+amalgamated
+disappears
+cautious
+proposes
+sanctions
+trenton
+##ر
+flotilla
+aus
+contempt
+tor
+canary
+cote
+theirs
+##hun
+conceptual
+deleted
+fascinating
+paso
+blazing
+elf
+honourable
+hutchinson
+##eiro
+##outh
+##zin
+surveyor
+tee
+amidst
+wooded
+reissue
+intro
+##ono
+cobb
+shelters
+newsletter
+hanson
+brace
+encoding
+confiscated
+dem
+caravan
+marino
+scroll
+melodic
+cows
+imam
+##adi
+##aneous
+northward
+searches
+biodiversity
+cora
+310
+roaring
+##bers
+connell
+theologian
+halo
+compose
+pathetic
+unmarried
+dynamo
+##oot
+az
+calculation
+toulouse
+deserves
+humour
+nr
+forgiveness
+tam
+undergone
+martyr
+pamela
+myths
+whore
+counselor
+hicks
+290
+heavens
+battleship
+electromagnetic
+##bbs
+stellar
+establishments
+presley
+hopped
+##chin
+temptation
+90s
+wills
+nas
+##yuan
+nhs
+##nya
+seminars
+##yev
+adaptations
+gong
+asher
+lex
+indicator
+sikh
+tobago
+cites
+goin
+##yte
+satirical
+##gies
+characterised
+correspond
+bubbles
+lure
+participates
+##vid
+eruption
+skate
+therapeutic
+1785
+canals
+wholesale
+defaulted
+sac
+460
+petit
+##zzled
+virgil
+leak
+ravens
+256
+portraying
+##yx
+ghetto
+creators
+dams
+portray
+vicente
+##rington
+fae
+namesake
+bounty
+##arium
+joachim
+##ota
+##iser
+aforementioned
+axle
+snout
+depended
+dismantled
+reuben
+480
+##ibly
+gallagher
+##lau
+##pd
+earnest
+##ieu
+##iary
+inflicted
+objections
+##llar
+asa
+gritted
+##athy
+jericho
+##sea
+##was
+flick
+underside
+ceramics
+undead
+substituted
+195
+eastward
+undoubtedly
+wheeled
+chimney
+##iche
+guinness
+cb
+##ager
+siding
+##bell
+traitor
+baptiste
+disguised
+inauguration
+149
+tipperary
+choreographer
+perched
+warmed
+stationary
+eco
+##ike
+##ntes
+bacterial
+##aurus
+flores
+phosphate
+##core
+attacker
+invaders
+alvin
+intersects
+a1
+indirectly
+immigrated
+businessmen
+cornelius
+valves
+narrated
+pill
+sober
+ul
+nationale
+monastic
+applicants
+scenery
+##jack
+161
+motifs
+constitutes
+cpu
+##osh
+jurisdictions
+sd
+tuning
+irritation
+woven
+##uddin
+fertility
+gao
+##erie
+antagonist
+impatient
+glacial
+hides
+boarded
+denominations
+interception
+##jas
+cookie
+nicola
+##tee
+algebraic
+marquess
+bahn
+parole
+buyers
+bait
+turbines
+paperwork
+bestowed
+natasha
+renee
+oceans
+purchases
+157
+vaccine
+215
+##tock
+fixtures
+playhouse
+integrate
+jai
+oswald
+intellectuals
+##cky
+booked
+nests
+mortimer
+##isi
+obsession
+sept
+##gler
+##sum
+440
+scrutiny
+simultaneous
+squinted
+##shin
+collects
+oven
+shankar
+penned
+remarkably
+##я
+slips
+luggage
+spectral
+1786
+collaborations
+louie
+consolidation
+##ailed
+##ivating
+420
+hoover
+blackpool
+harness
+ignition
+vest
+tails
+belmont
+mongol
+skinner
+##nae
+visually
+mage
+derry
+##tism
+##unce
+stevie
+transitional
+##rdy
+redskins
+drying
+prep
+prospective
+##21
+annoyance
+oversee
+##loaded
+fills
+##books
+##iki
+announces
+fda
+scowled
+respects
+prasad
+mystic
+tucson
+##vale
+revue
+springer
+bankrupt
+1772
+aristotle
+salvatore
+habsburg
+##geny
+dal
+natal
+nut
+pod
+chewing
+darts
+moroccan
+walkover
+rosario
+lenin
+punjabi
+##ße
+grossed
+scattering
+wired
+invasive
+hui
+polynomial
+corridors
+wakes
+gina
+portrays
+##cratic
+arid
+retreating
+erich
+irwin
+sniper
+##dha
+linen
+lindsey
+maneuver
+butch
+shutting
+socio
+bounce
+commemorative
+postseason
+jeremiah
+pines
+275
+mystical
+beads
+bp
+abbas
+furnace
+bidding
+consulted
+assaulted
+empirical
+rubble
+enclosure
+sob
+weakly
+cancel
+polly
+yielded
+##emann
+curly
+prediction
+battered
+70s
+vhs
+jacqueline
+render
+sails
+barked
+detailing
+grayson
+riga
+sloane
+raging
+##yah
+herbs
+bravo
+##athlon
+alloy
+giggle
+imminent
+suffers
+assumptions
+waltz
+##itate
+accomplishments
+##ited
+bathing
+remixed
+deception
+prefix
+##emia
+deepest
+##tier
+##eis
+balkan
+frogs
+##rong
+slab
+##pate
+philosophers
+peterborough
+grains
+imports
+dickinson
+rwanda
+##atics
+1774
+dirk
+lan
+tablets
+##rove
+clone
+##rice
+caretaker
+hostilities
+mclean
+##gre
+regimental
+treasures
+norms
+impose
+tsar
+tango
+diplomacy
+variously
+complain
+192
+recognise
+arrests
+1779
+celestial
+pulitzer
+##dus
+bing
+libretto
+##moor
+adele
+splash
+##rite
+expectation
+lds
+confronts
+##izer
+spontaneous
+harmful
+wedge
+entrepreneurs
+buyer
+##ope
+bilingual
+translate
+rugged
+conner
+circulated
+uae
+eaton
+##gra
+##zzle
+lingered
+lockheed
+vishnu
+reelection
+alonso
+##oom
+joints
+yankee
+headline
+cooperate
+heinz
+laureate
+invading
+##sford
+echoes
+scandinavian
+##dham
+hugging
+vitamin
+salute
+micah
+hind
+trader
+##sper
+radioactive
+##ndra
+militants
+poisoned
+ratified
+remark
+campeonato
+deprived
+wander
+prop
+##dong
+outlook
+##tani
+##rix
+##eye
+chiang
+darcy
+##oping
+mandolin
+spice
+statesman
+babylon
+182
+walled
+forgetting
+afro
+##cap
+158
+giorgio
+buffer
+##polis
+planetary
+##gis
+overlap
+terminals
+kinda
+centenary
+##bir
+arising
+manipulate
+elm
+ke
+1770
+ak
+##tad
+chrysler
+mapped
+moose
+pomeranian
+quad
+macarthur
+assemblies
+shoreline
+recalls
+stratford
+##rted
+noticeable
+##evic
+imp
+##rita
+##sque
+accustomed
+supplying
+tents
+disgusted
+vogue
+sipped
+filters
+khz
+reno
+selecting
+luftwaffe
+mcmahon
+tyne
+masterpiece
+carriages
+collided
+dunes
+exercised
+flare
+remembers
+muzzle
+##mobile
+heck
+##rson
+burgess
+lunged
+middleton
+boycott
+bilateral
+##sity
+hazardous
+lumpur
+multiplayer
+spotlight
+jackets
+goldman
+liege
+porcelain
+rag
+waterford
+benz
+attracts
+hopeful
+battling
+ottomans
+kensington
+baked
+hymns
+cheyenne
+lattice
+levine
+borrow
+polymer
+clashes
+michaels
+monitored
+commitments
+denounced
+##25
+##von
+cavity
+##oney
+hobby
+akin
+##holders
+futures
+intricate
+cornish
+patty
+##oned
+illegally
+dolphin
+##lag
+barlow
+yellowish
+maddie
+apologized
+luton
+plagued
+##puram
+nana
+##rds
+sway
+fanny
+łodz
+##rino
+psi
+suspicions
+hanged
+##eding
+initiate
+charlton
+##por
+nak
+competent
+235
+analytical
+annex
+wardrobe
+reservations
+##rma
+sect
+162
+fairfax
+hedge
+piled
+buckingham
+uneven
+bauer
+simplicity
+snyder
+interpret
+accountability
+donors
+moderately
+byrd
+continents
+##cite
+##max
+disciple
+hr
+jamaican
+ping
+nominees
+##uss
+mongolian
+diver
+attackers
+eagerly
+ideological
+pillows
+miracles
+apartheid
+revolver
+sulfur
+clinics
+moran
+163
+##enko
+ile
+katy
+rhetoric
+##icated
+chronology
+recycling
+##hrer
+elongated
+mughal
+pascal
+profiles
+vibration
+databases
+domination
+##fare
+##rant
+matthias
+digest
+rehearsal
+polling
+weiss
+initiation
+reeves
+clinging
+flourished
+impress
+ngo
+##hoff
+##ume
+buckley
+symposium
+rhythms
+weed
+emphasize
+transforming
+##taking
+##gence
+##yman
+accountant
+analyze
+flicker
+foil
+priesthood
+voluntarily
+decreases
+##80
+##hya
+slater
+sv
+charting
+mcgill
+##lde
+moreno
+##iu
+besieged
+zur
+robes
+##phic
+admitting
+api
+deported
+turmoil
+peyton
+earthquakes
+##ares
+nationalists
+beau
+clair
+brethren
+interrupt
+welch
+curated
+galerie
+requesting
+164
+##ested
+impending
+steward
+viper
+##vina
+complaining
+beautifully
+brandy
+foam
+nl
+1660
+##cake
+alessandro
+punches
+laced
+explanations
+##lim
+attribute
+clit
+reggie
+discomfort
+##cards
+smoothed
+whales
+##cene
+adler
+countered
+duffy
+disciplinary
+widening
+recipe
+reliance
+conducts
+goats
+gradient
+preaching
+##shaw
+matilda
+quasi
+striped
+meridian
+cannabis
+cordoba
+certificates
+##agh
+##tering
+graffiti
+hangs
+pilgrims
+repeats
+##ych
+revive
+urine
+etat
+##hawk
+fueled
+belts
+fuzzy
+susceptible
+##hang
+mauritius
+salle
+sincere
+beers
+hooks
+##cki
+arbitration
+entrusted
+advise
+sniffed
+seminar
+junk
+donnell
+processors
+principality
+strapped
+celia
+mendoza
+everton
+fortunes
+prejudice
+starving
+reassigned
+steamer
+##lund
+tuck
+evenly
+foreman
+##ffen
+dans
+375
+envisioned
+slit
+##xy
+baseman
+liberia
+rosemary
+##weed
+electrified
+periodically
+potassium
+stride
+contexts
+sperm
+slade
+mariners
+influx
+bianca
+subcommittee
+##rane
+spilling
+icao
+estuary
+##nock
+delivers
+iphone
+##ulata
+isa
+mira
+bohemian
+dessert
+##sbury
+welcoming
+proudly
+slowing
+##chs
+musee
+ascension
+russ
+##vian
+waits
+##psy
+africans
+exploit
+##morphic
+gov
+eccentric
+crab
+peck
+##ull
+entrances
+formidable
+marketplace
+groom
+bolted
+metabolism
+patton
+robbins
+courier
+payload
+endure
+##ifier
+andes
+refrigerator
+##pr
+ornate
+##uca
+ruthless
+illegitimate
+masonry
+strasbourg
+bikes
+adobe
+##³
+apples
+quintet
+willingly
+niche
+bakery
+corpses
+energetic
+##cliffe
+##sser
+##ards
+177
+centimeters
+centro
+fuscous
+cretaceous
+rancho
+##yde
+andrei
+telecom
+tottenham
+oasis
+ordination
+vulnerability
+presiding
+corey
+cp
+penguins
+sims
+##pis
+malawi
+piss
+##48
+correction
+##cked
+##ffle
+##ryn
+countdown
+detectives
+psychiatrist
+psychedelic
+dinosaurs
+blouse
+##get
+choi
+vowed
+##oz
+randomly
+##pol
+49ers
+scrub
+blanche
+bruins
+dusseldorf
+##using
+unwanted
+##ums
+212
+dominique
+elevations
+headlights
+om
+laguna
+##oga
+1750
+famously
+ignorance
+shrewsbury
+##aine
+ajax
+breuning
+che
+confederacy
+greco
+overhaul
+##screen
+paz
+skirts
+disagreement
+cruelty
+jagged
+phoebe
+shifter
+hovered
+viruses
+##wes
+mandy
+##lined
+##gc
+landlord
+squirrel
+dashed
+##ι
+ornamental
+gag
+wally
+grange
+literal
+spurs
+undisclosed
+proceeding
+yin
+##text
+billie
+orphan
+spanned
+humidity
+indy
+weighted
+presentations
+explosions
+lucian
+##tary
+vaughn
+hindus
+##anga
+##hell
+psycho
+171
+daytona
+protects
+efficiently
+rematch
+sly
+tandem
+##oya
+rebranded
+impaired
+hee
+metropolis
+peach
+godfrey
+diaspora
+ethnicity
+prosperous
+gleaming
+dar
+grossing
+playback
+##rden
+stripe
+pistols
+##tain
+births
+labelled
+##cating
+172
+rudy
+alba
+##onne
+aquarium
+hostility
+##gb
+##tase
+shudder
+sumatra
+hardest
+lakers
+consonant
+creeping
+demos
+homicide
+capsule
+zeke
+liberties
+expulsion
+pueblo
+##comb
+trait
+transporting
+##ddin
+##neck
+##yna
+depart
+gregg
+mold
+ledge
+hangar
+oldham
+playboy
+termination
+analysts
+gmbh
+romero
+##itic
+insist
+cradle
+filthy
+brightness
+slash
+shootout
+deposed
+bordering
+##truct
+isis
+microwave
+tumbled
+sheltered
+cathy
+werewolves
+messy
+andersen
+convex
+clapped
+clinched
+satire
+wasting
+edo
+vc
+rufus
+##jak
+mont
+##etti
+poznan
+##keeping
+restructuring
+transverse
+##rland
+azerbaijani
+slovene
+gestures
+roommate
+choking
+shear
+##quist
+vanguard
+oblivious
+##hiro
+disagreed
+baptism
+##lich
+coliseum
+##aceae
+salvage
+societe
+cory
+locke
+relocation
+relying
+versailles
+ahl
+swelling
+##elo
+cheerful
+##word
+##edes
+gin
+sarajevo
+obstacle
+diverted
+##nac
+messed
+thoroughbred
+fluttered
+utrecht
+chewed
+acquaintance
+assassins
+dispatch
+mirza
+##wart
+nike
+salzburg
+swell
+yen
+##gee
+idle
+ligue
+samson
+##nds
+##igh
+playful
+spawned
+##cise
+tease
+##case
+burgundy
+##bot
+stirring
+skeptical
+interceptions
+marathi
+##dies
+bedrooms
+aroused
+pinch
+##lik
+preferences
+tattoos
+buster
+digitally
+projecting
+rust
+##ital
+kitten
+priorities
+addison
+pseudo
+##guard
+dusk
+icons
+sermon
+##psis
+##iba
+bt
+##lift
+##xt
+ju
+truce
+rink
+##dah
+##wy
+defects
+psychiatry
+offences
+calculate
+glucose
+##iful
+##rized
+##unda
+francaise
+##hari
+richest
+warwickshire
+carly
+1763
+purity
+redemption
+lending
+##cious
+muse
+bruises
+cerebral
+aero
+carving
+##name
+preface
+terminology
+invade
+monty
+##int
+anarchist
+blurred
+##iled
+rossi
+treats
+guts
+shu
+foothills
+ballads
+undertaking
+premise
+cecilia
+affiliates
+blasted
+conditional
+wilder
+minors
+drone
+rudolph
+buffy
+swallowing
+horton
+attested
+##hop
+rutherford
+howell
+primetime
+livery
+penal
+##bis
+minimize
+hydro
+wrecked
+wrought
+palazzo
+##gling
+cans
+vernacular
+friedman
+nobleman
+shale
+walnut
+danielle
+##ection
+##tley
+sears
+##kumar
+chords
+lend
+flipping
+streamed
+por
+dracula
+gallons
+sacrifices
+gamble
+orphanage
+##iman
+mckenzie
+##gible
+boxers
+daly
+##balls
+##ان
+208
+##ific
+##rative
+##iq
+exploited
+slated
+##uity
+circling
+hillary
+pinched
+goldberg
+provost
+campaigning
+lim
+piles
+ironically
+jong
+mohan
+successors
+usaf
+##tem
+##ught
+autobiographical
+haute
+preserves
+##ending
+acquitted
+comparisons
+203
+hydroelectric
+gangs
+cypriot
+torpedoes
+rushes
+chrome
+derive
+bumps
+instability
+fiat
+pets
+##mbe
+silas
+dye
+reckless
+settler
+##itation
+info
+heats
+##writing
+176
+canonical
+maltese
+fins
+mushroom
+stacy
+aspen
+avid
+##kur
+##loading
+vickers
+gaston
+hillside
+statutes
+wilde
+gail
+kung
+sabine
+comfortably
+motorcycles
+##rgo
+169
+pneumonia
+fetch
+##sonic
+axel
+faintly
+parallels
+##oop
+mclaren
+spouse
+compton
+interdisciplinary
+miner
+##eni
+181
+clamped
+##chal
+##llah
+separates
+versa
+##mler
+scarborough
+labrador
+##lity
+##osing
+rutgers
+hurdles
+como
+166
+burt
+divers
+##100
+wichita
+cade
+coincided
+##erson
+bruised
+mla
+##pper
+vineyard
+##ili
+##brush
+notch
+mentioning
+jase
+hearted
+kits
+doe
+##acle
+pomerania
+##ady
+ronan
+seizure
+pavel
+problematic
+##zaki
+domenico
+##ulin
+catering
+penelope
+dependence
+parental
+emilio
+ministerial
+atkinson
+##bolic
+clarkson
+chargers
+colby
+grill
+peeked
+arises
+summon
+##aged
+fools
+##grapher
+faculties
+qaeda
+##vial
+garner
+refurbished
+##hwa
+geelong
+disasters
+nudged
+bs
+shareholder
+lori
+algae
+reinstated
+rot
+##ades
+##nous
+invites
+stainless
+183
+inclusive
+##itude
+diocesan
+til
+##icz
+denomination
+##xa
+benton
+floral
+registers
+##ider
+##erman
+##kell
+absurd
+brunei
+guangzhou
+hitter
+retaliation
+##uled
+##eve
+blanc
+nh
+consistency
+contamination
+##eres
+##rner
+dire
+palermo
+broadcasters
+diaries
+inspire
+vols
+brewer
+tightening
+ky
+mixtape
+hormone
+##tok
+stokes
+##color
+##dly
+##ssi
+pg
+##ometer
+##lington
+sanitation
+##tility
+intercontinental
+apps
+##adt
+¹⁄₂
+cylinders
+economies
+favourable
+unison
+croix
+gertrude
+odyssey
+vanity
+dangling
+##logists
+upgrades
+dice
+middleweight
+practitioner
+##ight
+206
+henrik
+parlor
+orion
+angered
+lac
+python
+blurted
+##rri
+sensual
+intends
+swings
+angled
+##phs
+husky
+attain
+peerage
+precinct
+textiles
+cheltenham
+shuffled
+dai
+confess
+tasting
+bhutan
+##riation
+tyrone
+segregation
+abrupt
+ruiz
+##rish
+smirked
+blackwell
+confidential
+browning
+amounted
+##put
+vase
+scarce
+fabulous
+raided
+staple
+guyana
+unemployed
+glider
+shay
+##tow
+carmine
+troll
+intervene
+squash
+superstar
+##uce
+cylindrical
+len
+roadway
+researched
+handy
+##rium
+##jana
+meta
+lao
+declares
+##rring
+##tadt
+##elin
+##kova
+willem
+shrubs
+napoleonic
+realms
+skater
+qi
+volkswagen
+##ł
+tad
+hara
+archaeologist
+awkwardly
+eerie
+##kind
+wiley
+##heimer
+##24
+titus
+organizers
+cfl
+crusaders
+lama
+usb
+vent
+enraged
+thankful
+occupants
+maximilian
+##gaard
+possessing
+textbooks
+##oran
+collaborator
+quaker
+##ulo
+avalanche
+mono
+silky
+straits
+isaiah
+mustang
+surged
+resolutions
+potomac
+descend
+cl
+kilograms
+plato
+strains
+saturdays
+##olin
+bernstein
+##ype
+holstein
+ponytail
+##watch
+belize
+conversely
+heroine
+perpetual
+##ylus
+charcoal
+piedmont
+glee
+negotiating
+backdrop
+prologue
+##jah
+##mmy
+pasadena
+climbs
+ramos
+sunni
+##holm
+##tner
+##tri
+anand
+deficiency
+hertfordshire
+stout
+##avi
+aperture
+orioles
+##irs
+doncaster
+intrigued
+bombed
+coating
+otis
+##mat
+cocktail
+##jit
+##eto
+amir
+arousal
+sar
+##proof
+##act
+##ories
+dixie
+pots
+##bow
+whereabouts
+159
+##fted
+drains
+bullying
+cottages
+scripture
+coherent
+fore
+poe
+appetite
+##uration
+sampled
+##ators
+##dp
+derrick
+rotor
+jays
+peacock
+installment
+##rro
+advisors
+##coming
+rodeo
+scotch
+##mot
+##db
+##fen
+##vant
+ensued
+rodrigo
+dictatorship
+martyrs
+twenties
+##н
+towed
+incidence
+marta
+rainforest
+sai
+scaled
+##cles
+oceanic
+qualifiers
+symphonic
+mcbride
+dislike
+generalized
+aubrey
+colonization
+##iation
+##lion
+##ssing
+disliked
+lublin
+salesman
+##ulates
+spherical
+whatsoever
+sweating
+avalon
+contention
+punt
+severity
+alderman
+atari
+##dina
+##grant
+##rop
+scarf
+seville
+vertices
+annexation
+fairfield
+fascination
+inspiring
+launches
+palatinate
+regretted
+##rca
+feral
+##iom
+elk
+nap
+olsen
+reddy
+yong
+##leader
+##iae
+garment
+transports
+feng
+gracie
+outrage
+viceroy
+insides
+##esis
+breakup
+grady
+organizer
+softer
+grimaced
+222
+murals
+galicia
+arranging
+vectors
+##rsten
+bas
+##sb
+##cens
+sloan
+##eka
+bitten
+ara
+fender
+nausea
+bumped
+kris
+banquet
+comrades
+detector
+persisted
+##llan
+adjustment
+endowed
+cinemas
+##shot
+sellers
+##uman
+peek
+epa
+kindly
+neglect
+simpsons
+talon
+mausoleum
+runaway
+hangul
+lookout
+##cic
+rewards
+coughed
+acquainted
+chloride
+##ald
+quicker
+accordion
+neolithic
+##qa
+artemis
+coefficient
+lenny
+pandora
+tx
+##xed
+ecstasy
+litter
+segunda
+chairperson
+gemma
+hiss
+rumor
+vow
+nasal
+antioch
+compensate
+patiently
+transformers
+##eded
+judo
+morrow
+penis
+posthumous
+philips
+bandits
+husbands
+denote
+flaming
+##any
+##phones
+langley
+yorker
+1760
+walters
+##uo
+##kle
+gubernatorial
+fatty
+samsung
+leroy
+outlaw
+##nine
+unpublished
+poole
+jakob
+##ᵢ
+##ₙ
+crete
+distorted
+superiority
+##dhi
+intercept
+crust
+mig
+claus
+crashes
+positioning
+188
+stallion
+301
+frontal
+armistice
+##estinal
+elton
+aj
+encompassing
+camel
+commemorated
+malaria
+woodward
+calf
+cigar
+penetrate
+##oso
+willard
+##rno
+##uche
+illustrate
+amusing
+convergence
+noteworthy
+##lma
+##rva
+journeys
+realise
+manfred
+##sable
+410
+##vocation
+hearings
+fiance
+##posed
+educators
+provoked
+adjusting
+##cturing
+modular
+stockton
+paterson
+vlad
+rejects
+electors
+selena
+maureen
+##tres
+uber
+##rce
+swirled
+##num
+proportions
+nanny
+pawn
+naturalist
+parma
+apostles
+awoke
+ethel
+wen
+##bey
+monsoon
+overview
+##inating
+mccain
+rendition
+risky
+adorned
+##ih
+equestrian
+germain
+nj
+conspicuous
+confirming
+##yoshi
+shivering
+##imeter
+milestone
+rumours
+flinched
+bounds
+smacked
+token
+##bei
+lectured
+automobiles
+##shore
+impacted
+##iable
+nouns
+nero
+##leaf
+ismail
+prostitute
+trams
+##lace
+bridget
+sud
+stimulus
+impressions
+reins
+revolves
+##oud
+##gned
+giro
+honeymoon
+##swell
+criterion
+##sms
+##uil
+libyan
+prefers
+##osition
+211
+preview
+sucks
+accusation
+bursts
+metaphor
+diffusion
+tolerate
+faye
+betting
+cinematographer
+liturgical
+specials
+bitterly
+humboldt
+##ckle
+flux
+rattled
+##itzer
+archaeologists
+odor
+authorised
+marshes
+discretion
+##ов
+alarmed
+archaic
+inverse
+##leton
+explorers
+##pine
+drummond
+tsunami
+woodlands
+##minate
+##tland
+booklet
+insanity
+owning
+insert
+crafted
+calculus
+##tore
+receivers
+##bt
+stung
+##eca
+##nched
+prevailing
+travellers
+eyeing
+lila
+graphs
+##borne
+178
+julien
+##won
+morale
+adaptive
+therapist
+erica
+cw
+libertarian
+bowman
+pitches
+vita
+##ional
+crook
+##ads
+##entation
+caledonia
+mutiny
+##sible
+1840s
+automation
+##ß
+flock
+##pia
+ironic
+pathology
+##imus
+remarried
+##22
+joker
+withstand
+energies
+##att
+shropshire
+hostages
+madeleine
+tentatively
+conflicting
+mateo
+recipes
+euros
+ol
+mercenaries
+nico
+##ndon
+albuquerque
+augmented
+mythical
+bel
+freud
+##child
+cough
+##lica
+365
+freddy
+lillian
+genetically
+nuremberg
+calder
+209
+bonn
+outdoors
+paste
+suns
+urgency
+vin
+restraint
+tyson
+##cera
+##selle
+barrage
+bethlehem
+kahn
+##par
+mounts
+nippon
+barony
+happier
+ryu
+makeshift
+sheldon
+blushed
+castillo
+barking
+listener
+taped
+bethel
+fluent
+headlines
+pornography
+rum
+disclosure
+sighing
+mace
+doubling
+gunther
+manly
+##plex
+rt
+interventions
+physiological
+forwards
+emerges
+##tooth
+##gny
+compliment
+rib
+recession
+visibly
+barge
+faults
+connector
+exquisite
+prefect
+##rlin
+patio
+##cured
+elevators
+brandt
+italics
+pena
+173
+wasp
+satin
+ea
+botswana
+graceful
+respectable
+##jima
+##rter
+##oic
+franciscan
+generates
+##dl
+alfredo
+disgusting
+##olate
+##iously
+sherwood
+warns
+cod
+promo
+cheryl
+sino
+##ة
+##escu
+twitch
+##zhi
+brownish
+thom
+ortiz
+##dron
+densely
+##beat
+carmel
+reinforce
+##bana
+187
+anastasia
+downhill
+vertex
+contaminated
+remembrance
+harmonic
+homework
+##sol
+fiancee
+gears
+olds
+angelica
+loft
+ramsay
+quiz
+colliery
+sevens
+##cape
+autism
+##hil
+walkway
+##boats
+ruben
+abnormal
+ounce
+khmer
+##bbe
+zachary
+bedside
+morphology
+punching
+##olar
+sparrow
+convinces
+##35
+hewitt
+queer
+remastered
+rods
+mabel
+solemn
+notified
+lyricist
+symmetric
+##xide
+174
+encore
+passports
+wildcats
+##uni
+baja
+##pac
+mildly
+##ease
+bleed
+commodity
+mounds
+glossy
+orchestras
+##omo
+damian
+prelude
+ambitions
+##vet
+awhile
+remotely
+##aud
+asserts
+imply
+##iques
+distinctly
+modelling
+remedy
+##dded
+windshield
+dani
+xiao
+##endra
+audible
+powerplant
+1300
+invalid
+elemental
+acquisitions
+##hala
+immaculate
+libby
+plata
+smuggling
+ventilation
+denoted
+minh
+##morphism
+430
+differed
+dion
+kelley
+lore
+mocking
+sabbath
+spikes
+hygiene
+drown
+runoff
+stylized
+tally
+liberated
+aux
+interpreter
+righteous
+aba
+siren
+reaper
+pearce
+millie
+##cier
+##yra
+gaius
+##iso
+captures
+##ttering
+dorm
+claudio
+##sic
+benches
+knighted
+blackness
+##ored
+discount
+fumble
+oxidation
+routed
+##ς
+novak
+perpendicular
+spoiled
+fracture
+splits
+##urt
+pads
+topology
+##cats
+axes
+fortunate
+offenders
+protestants
+esteem
+221
+broadband
+convened
+frankly
+hound
+prototypes
+isil
+facilitated
+keel
+##sher
+sahara
+awaited
+bubba
+orb
+prosecutors
+186
+hem
+520
+##xing
+relaxing
+remnant
+romney
+sorted
+slalom
+stefano
+ulrich
+##active
+exemption
+folder
+pauses
+foliage
+hitchcock
+epithet
+204
+criticisms
+##aca
+ballistic
+brody
+hinduism
+chaotic
+youths
+equals
+##pala
+pts
+thicker
+analogous
+capitalist
+improvised
+overseeing
+sinatra
+ascended
+beverage
+##tl
+straightforward
+##kon
+curran
+##west
+bois
+325
+induce
+surveying
+emperors
+sax
+unpopular
+##kk
+cartoonist
+fused
+##mble
+unto
+##yuki
+localities
+##cko
+##ln
+darlington
+slain
+academie
+lobbying
+sediment
+puzzles
+##grass
+defiance
+dickens
+manifest
+tongues
+alumnus
+arbor
+coincide
+184
+appalachian
+mustafa
+examiner
+cabaret
+traumatic
+yves
+bracelet
+draining
+heroin
+magnum
+baths
+odessa
+consonants
+mitsubishi
+##gua
+kellan
+vaudeville
+##fr
+joked
+null
+straps
+probation
+##ław
+ceded
+interfaces
+##pas
+##zawa
+blinding
+viet
+224
+rothschild
+museo
+640
+huddersfield
+##vr
+tactic
+##storm
+brackets
+dazed
+incorrectly
+##vu
+reg
+glazed
+fearful
+manifold
+benefited
+irony
+##sun
+stumbling
+##rte
+willingness
+balkans
+mei
+wraps
+##aba
+injected
+##lea
+gu
+syed
+harmless
+##hammer
+bray
+takeoff
+poppy
+timor
+cardboard
+astronaut
+purdue
+weeping
+southbound
+cursing
+stalls
+diagonal
+##neer
+lamar
+bryce
+comte
+weekdays
+harrington
+##uba
+negatively
+##see
+lays
+grouping
+##cken
+##henko
+affirmed
+halle
+modernist
+##lai
+hodges
+smelling
+aristocratic
+baptized
+dismiss
+justification
+oilers
+##now
+coupling
+qin
+snack
+healer
+##qing
+gardener
+layla
+battled
+formulated
+stephenson
+gravitational
+##gill
+##jun
+1768
+granny
+coordinating
+suites
+##cd
+##ioned
+monarchs
+##cote
+##hips
+sep
+blended
+apr
+barrister
+deposition
+fia
+mina
+policemen
+paranoid
+##pressed
+churchyard
+covert
+crumpled
+creep
+abandoning
+tr
+transmit
+conceal
+barr
+understands
+readiness
+spire
+##cology
+##enia
+##erry
+610
+startling
+unlock
+vida
+bowled
+slots
+##nat
+##islav
+spaced
+trusting
+admire
+rig
+##ink
+slack
+##70
+mv
+207
+casualty
+##wei
+classmates
+##odes
+##rar
+##rked
+amherst
+furnished
+evolve
+foundry
+menace
+mead
+##lein
+flu
+wesleyan
+##kled
+monterey
+webber
+##vos
+wil
+##mith
+##на
+bartholomew
+justices
+restrained
+##cke
+amenities
+191
+mediated
+sewage
+trenches
+ml
+mainz
+##thus
+1800s
+##cula
+##inski
+caine
+bonding
+213
+converts
+spheres
+superseded
+marianne
+crypt
+sweaty
+ensign
+historia
+##br
+spruce
+##post
+##ask
+forks
+thoughtfully
+yukon
+pamphlet
+ames
+##uter
+karma
+##yya
+bryn
+negotiation
+sighs
+incapable
+##mbre
+##ntial
+actresses
+taft
+##mill
+luce
+prevailed
+##amine
+1773
+motionless
+envoy
+testify
+investing
+sculpted
+instructors
+provence
+kali
+cullen
+horseback
+##while
+goodwin
+##jos
+gaa
+norte
+##ldon
+modify
+wavelength
+abd
+214
+skinned
+sprinter
+forecast
+scheduling
+marries
+squared
+tentative
+##chman
+boer
+##isch
+bolts
+swap
+fisherman
+assyrian
+impatiently
+guthrie
+martins
+murdoch
+194
+tanya
+nicely
+dolly
+lacy
+med
+##45
+syn
+decks
+fashionable
+millionaire
+##ust
+surfing
+##ml
+##ision
+heaved
+tammy
+consulate
+attendees
+routinely
+197
+fuse
+saxophonist
+backseat
+malaya
+##lord
+scowl
+tau
+##ishly
+193
+sighted
+steaming
+##rks
+303
+911
+##holes
+##hong
+ching
+##wife
+bless
+conserved
+jurassic
+stacey
+unix
+zion
+chunk
+rigorous
+blaine
+198
+peabody
+slayer
+dismay
+brewers
+nz
+##jer
+det
+##glia
+glover
+postwar
+int
+penetration
+sylvester
+imitation
+vertically
+airlift
+heiress
+knoxville
+viva
+##uin
+390
+macon
+##rim
+##fighter
+##gonal
+janice
+##orescence
+##wari
+marius
+belongings
+leicestershire
+196
+blanco
+inverted
+preseason
+sanity
+sobbing
+##due
+##elt
+##dled
+collingwood
+regeneration
+flickering
+shortest
+##mount
+##osi
+feminism
+##lat
+sherlock
+cabinets
+fumbled
+northbound
+precedent
+snaps
+##mme
+researching
+##akes
+guillaume
+insights
+manipulated
+vapor
+neighbour
+sap
+gangster
+frey
+f1
+stalking
+scarcely
+callie
+barnett
+tendencies
+audi
+doomed
+assessing
+slung
+panchayat
+ambiguous
+bartlett
+##etto
+distributing
+violating
+wolverhampton
+##hetic
+swami
+histoire
+##urus
+liable
+pounder
+groin
+hussain
+larsen
+popping
+surprises
+##atter
+vie
+curt
+##station
+mute
+relocate
+musicals
+authorization
+richter
+##sef
+immortality
+tna
+bombings
+##press
+deteriorated
+yiddish
+##acious
+robbed
+colchester
+cs
+pmid
+ao
+verified
+balancing
+apostle
+swayed
+recognizable
+oxfordshire
+retention
+nottinghamshire
+contender
+judd
+invitational
+shrimp
+uhf
+##icient
+cleaner
+longitudinal
+tanker
+##mur
+acronym
+broker
+koppen
+sundance
+suppliers
+##gil
+4000
+clipped
+fuels
+petite
+##anne
+landslide
+helene
+diversion
+populous
+landowners
+auspices
+melville
+quantitative
+##xes
+ferries
+nicky
+##llus
+doo
+haunting
+roche
+carver
+downed
+unavailable
+##pathy
+approximation
+hiroshima
+##hue
+garfield
+valle
+comparatively
+keyboardist
+traveler
+##eit
+congestion
+calculating
+subsidiaries
+##bate
+serb
+modernization
+fairies
+deepened
+ville
+averages
+##lore
+inflammatory
+tonga
+##itch
+co₂
+squads
+##hea
+gigantic
+serum
+enjoyment
+retailer
+verona
+35th
+cis
+##phobic
+magna
+technicians
+##vati
+arithmetic
+##sport
+levin
+##dation
+amtrak
+chow
+sienna
+##eyer
+backstage
+entrepreneurship
+##otic
+learnt
+tao
+##udy
+worcestershire
+formulation
+baggage
+hesitant
+bali
+sabotage
+##kari
+barren
+enhancing
+murmur
+pl
+freshly
+putnam
+syntax
+aces
+medicines
+resentment
+bandwidth
+##sier
+grins
+chili
+guido
+##sei
+framing
+implying
+gareth
+lissa
+genevieve
+pertaining
+admissions
+geo
+thorpe
+proliferation
+sato
+bela
+analyzing
+parting
+##gor
+awakened
+##isman
+huddled
+secrecy
+##kling
+hush
+gentry
+540
+dungeons
+##ego
+coasts
+##utz
+sacrificed
+##chule
+landowner
+mutually
+prevalence
+programmer
+adolescent
+disrupted
+seaside
+gee
+trusts
+vamp
+georgie
+##nesian
+##iol
+schedules
+sindh
+##market
+etched
+hm
+sparse
+bey
+beaux
+scratching
+gliding
+unidentified
+216
+collaborating
+gems
+jesuits
+oro
+accumulation
+shaping
+mbe
+anal
+##xin
+231
+enthusiasts
+newscast
+##egan
+janata
+dewey
+parkinson
+179
+ankara
+biennial
+towering
+dd
+inconsistent
+950
+##chet
+thriving
+terminate
+cabins
+furiously
+eats
+advocating
+donkey
+marley
+muster
+phyllis
+leiden
+##user
+grassland
+glittering
+iucn
+loneliness
+217
+memorandum
+armenians
+##ddle
+popularized
+rhodesia
+60s
+lame
+##illon
+sans
+bikini
+header
+orbits
+##xx
+##finger
+##ulator
+sharif
+spines
+biotechnology
+strolled
+naughty
+yates
+##wire
+fremantle
+milo
+##mour
+abducted
+removes
+##atin
+humming
+wonderland
+##chrome
+##ester
+hume
+pivotal
+##rates
+armand
+grams
+believers
+elector
+rte
+apron
+bis
+scraped
+##yria
+endorsement
+initials
+##llation
+eps
+dotted
+hints
+buzzing
+emigration
+nearer
+##tom
+indicators
+##ulu
+coarse
+neutron
+protectorate
+##uze
+directional
+exploits
+pains
+loire
+1830s
+proponents
+guggenheim
+rabbits
+ritchie
+305
+hectare
+inputs
+hutton
+##raz
+verify
+##ako
+boilers
+longitude
+##lev
+skeletal
+yer
+emilia
+citrus
+compromised
+##gau
+pokemon
+prescription
+paragraph
+eduard
+cadillac
+attire
+categorized
+kenyan
+weddings
+charley
+##bourg
+entertain
+monmouth
+##lles
+nutrients
+davey
+mesh
+incentive
+practised
+ecosystems
+kemp
+subdued
+overheard
+##rya
+bodily
+maxim
+##nius
+apprenticeship
+ursula
+##fight
+lodged
+rug
+silesian
+unconstitutional
+patel
+inspected
+coyote
+unbeaten
+##hak
+34th
+disruption
+convict
+parcel
+##cl
+##nham
+collier
+implicated
+mallory
+##iac
+##lab
+susannah
+winkler
+##rber
+shia
+phelps
+sediments
+graphical
+robotic
+##sner
+adulthood
+mart
+smoked
+##isto
+kathryn
+clarified
+##aran
+divides
+convictions
+oppression
+pausing
+burying
+##mt
+federico
+mathias
+eileen
+##tana
+kite
+hunched
+##acies
+189
+##atz
+disadvantage
+liza
+kinetic
+greedy
+paradox
+yokohama
+dowager
+trunks
+ventured
+##gement
+gupta
+vilnius
+olaf
+##thest
+crimean
+hopper
+##ej
+progressively
+arturo
+mouthed
+arrondissement
+##fusion
+rubin
+simulcast
+oceania
+##orum
+##stra
+##rred
+busiest
+intensely
+navigator
+cary
+##vine
+##hini
+##bies
+fife
+rowe
+rowland
+posing
+insurgents
+shafts
+lawsuits
+activate
+conor
+inward
+culturally
+garlic
+265
+##eering
+eclectic
+##hui
+##kee
+##nl
+furrowed
+vargas
+meteorological
+rendezvous
+##aus
+culinary
+commencement
+##dition
+quota
+##notes
+mommy
+salaries
+overlapping
+mule
+##iology
+##mology
+sums
+wentworth
+##isk
+##zione
+mainline
+subgroup
+##illy
+hack
+plaintiff
+verdi
+bulb
+differentiation
+engagements
+multinational
+supplemented
+bertrand
+caller
+regis
+##naire
+##sler
+##arts
+##imated
+blossom
+propagation
+kilometer
+viaduct
+vineyards
+##uate
+beckett
+optimization
+golfer
+songwriters
+seminal
+semitic
+thud
+volatile
+evolving
+ridley
+##wley
+trivial
+distributions
+scandinavia
+jiang
+##ject
+wrestled
+insistence
+##dio
+emphasizes
+napkin
+##ods
+adjunct
+rhyme
+##ricted
+##eti
+hopeless
+surrounds
+tremble
+32nd
+smoky
+##ntly
+oils
+medicinal
+padded
+steer
+wilkes
+219
+255
+concessions
+hue
+uniquely
+blinded
+landon
+yahoo
+##lane
+hendrix
+commemorating
+dex
+specify
+chicks
+##ggio
+intercity
+1400
+morley
+##torm
+highlighting
+##oting
+pang
+oblique
+stalled
+##liner
+flirting
+newborn
+1769
+bishopric
+shaved
+232
+currie
+##ush
+dharma
+spartan
+##ooped
+favorites
+smug
+novella
+sirens
+abusive
+creations
+espana
+##lage
+paradigm
+semiconductor
+sheen
+##rdo
+##yen
+##zak
+nrl
+renew
+##pose
+##tur
+adjutant
+marches
+norma
+##enity
+ineffective
+weimar
+grunt
+##gat
+lordship
+plotting
+expenditure
+infringement
+lbs
+refrain
+av
+mimi
+mistakenly
+postmaster
+1771
+##bara
+ras
+motorsports
+tito
+199
+subjective
+##zza
+bully
+stew
+##kaya
+prescott
+1a
+##raphic
+##zam
+bids
+styling
+paranormal
+reeve
+sneaking
+exploding
+katz
+akbar
+migrant
+syllables
+indefinitely
+##ogical
+destroys
+replaces
+applause
+##phine
+pest
+##fide
+218
+articulated
+bertie
+##thing
+##cars
+##ptic
+courtroom
+crowley
+aesthetics
+cummings
+tehsil
+hormones
+titanic
+dangerously
+##ibe
+stadion
+jaenelle
+auguste
+ciudad
+##chu
+mysore
+partisans
+##sio
+lucan
+philipp
+##aly
+debating
+henley
+interiors
+##rano
+##tious
+homecoming
+beyonce
+usher
+henrietta
+prepares
+weeds
+##oman
+ely
+plucked
+##pire
+##dable
+luxurious
+##aq
+artifact
+password
+pasture
+juno
+maddy
+minsk
+##dder
+##ologies
+##rone
+assessments
+martian
+royalist
+1765
+examines
+##mani
+##rge
+nino
+223
+parry
+scooped
+relativity
+##eli
+##uting
+##cao
+congregational
+noisy
+traverse
+##agawa
+strikeouts
+nickelodeon
+obituary
+transylvania
+binds
+depictions
+polk
+trolley
+##yed
+##lard
+breeders
+##under
+dryly
+hokkaido
+1762
+strengths
+stacks
+bonaparte
+connectivity
+neared
+prostitutes
+stamped
+anaheim
+gutierrez
+sinai
+##zzling
+bram
+fresno
+madhya
+##86
+proton
+##lena
+##llum
+##phon
+reelected
+wanda
+##anus
+##lb
+ample
+distinguishing
+##yler
+grasping
+sermons
+tomato
+bland
+stimulation
+avenues
+##eux
+spreads
+scarlett
+fern
+pentagon
+assert
+baird
+chesapeake
+ir
+calmed
+distortion
+fatalities
+##olis
+correctional
+pricing
+##astic
+##gina
+prom
+dammit
+ying
+collaborate
+##chia
+welterweight
+33rd
+pointer
+substitution
+bonded
+umpire
+communicating
+multitude
+paddle
+##obe
+federally
+intimacy
+##insky
+betray
+ssr
+##lett
+##lean
+##lves
+##therapy
+airbus
+##tery
+functioned
+ud
+bearer
+biomedical
+netflix
+##hire
+##nca
+condom
+brink
+ik
+##nical
+macy
+##bet
+flap
+gma
+experimented
+jelly
+lavender
+##icles
+##ulia
+munro
+##mian
+##tial
+rye
+##rle
+60th
+gigs
+hottest
+rotated
+predictions
+fuji
+bu
+##erence
+##omi
+barangay
+##fulness
+##sas
+clocks
+##rwood
+##liness
+cereal
+roe
+wight
+decker
+uttered
+babu
+onion
+xml
+forcibly
+##df
+petra
+sarcasm
+hartley
+peeled
+storytelling
+##42
+##xley
+##ysis
+##ffa
+fibre
+kiel
+auditor
+fig
+harald
+greenville
+##berries
+geographically
+nell
+quartz
+##athic
+cemeteries
+##lr
+crossings
+nah
+holloway
+reptiles
+chun
+sichuan
+snowy
+660
+corrections
+##ivo
+zheng
+ambassadors
+blacksmith
+fielded
+fluids
+hardcover
+turnover
+medications
+melvin
+academies
+##erton
+ro
+roach
+absorbing
+spaniards
+colton
+##founded
+outsider
+espionage
+kelsey
+245
+edible
+##ulf
+dora
+establishes
+##sham
+##tries
+contracting
+##tania
+cinematic
+costello
+nesting
+##uron
+connolly
+duff
+##nology
+mma
+##mata
+fergus
+sexes
+gi
+optics
+spectator
+woodstock
+banning
+##hee
+##fle
+differentiate
+outfielder
+refinery
+226
+312
+gerhard
+horde
+lair
+drastically
+##udi
+landfall
+##cheng
+motorsport
+odi
+##achi
+predominant
+quay
+skins
+##ental
+edna
+harshly
+complementary
+murdering
+##aves
+wreckage
+##90
+ono
+outstretched
+lennox
+munitions
+galen
+reconcile
+470
+scalp
+bicycles
+gillespie
+questionable
+rosenberg
+guillermo
+hostel
+jarvis
+kabul
+volvo
+opium
+yd
+##twined
+abuses
+decca
+outpost
+##cino
+sensible
+neutrality
+##64
+ponce
+anchorage
+atkins
+turrets
+inadvertently
+disagree
+libre
+vodka
+reassuring
+weighs
+##yal
+glide
+jumper
+ceilings
+repertory
+outs
+stain
+##bial
+envy
+##ucible
+smashing
+heightened
+policing
+hyun
+mixes
+lai
+prima
+##ples
+celeste
+##bina
+lucrative
+intervened
+kc
+manually
+##rned
+stature
+staffed
+bun
+bastards
+nairobi
+priced
+##auer
+thatcher
+##kia
+tripped
+comune
+##ogan
+##pled
+brasil
+incentives
+emanuel
+hereford
+musica
+##kim
+benedictine
+biennale
+##lani
+eureka
+gardiner
+rb
+knocks
+sha
+##ael
+##elled
+##onate
+efficacy
+ventura
+masonic
+sanford
+maize
+leverage
+##feit
+capacities
+santana
+##aur
+novelty
+vanilla
+##cter
+##tour
+benin
+##oir
+##rain
+neptune
+drafting
+tallinn
+##cable
+humiliation
+##boarding
+schleswig
+fabian
+bernardo
+liturgy
+spectacle
+sweeney
+pont
+routledge
+##tment
+cosmos
+ut
+hilt
+sleek
+universally
+##eville
+##gawa
+typed
+##dry
+favors
+allegheny
+glaciers
+##rly
+recalling
+aziz
+##log
+parasite
+requiem
+auf
+##berto
+##llin
+illumination
+##breaker
+##issa
+festivities
+bows
+govern
+vibe
+vp
+333
+sprawled
+larson
+pilgrim
+bwf
+leaping
+##rts
+##ssel
+alexei
+greyhound
+hoarse
+##dler
+##oration
+seneca
+##cule
+gaping
+##ulously
+##pura
+cinnamon
+##gens
+##rricular
+craven
+fantasies
+houghton
+engined
+reigned
+dictator
+supervising
+##oris
+bogota
+commentaries
+unnatural
+fingernails
+spirituality
+tighten
+##tm
+canadiens
+protesting
+intentional
+cheers
+sparta
+##ytic
+##iere
+##zine
+widen
+belgarath
+controllers
+dodd
+iaaf
+navarre
+##ication
+defect
+squire
+steiner
+whisky
+##mins
+560
+inevitably
+tome
+##gold
+chew
+##uid
+##lid
+elastic
+##aby
+streaked
+alliances
+jailed
+regal
+##ined
+##phy
+czechoslovak
+narration
+absently
+##uld
+bluegrass
+guangdong
+quran
+criticizing
+hose
+hari
+##liest
+##owa
+skier
+streaks
+deploy
+##lom
+raft
+bose
+dialed
+huff
+##eira
+haifa
+simplest
+bursting
+endings
+ib
+sultanate
+##titled
+franks
+whitman
+ensures
+sven
+##ggs
+collaborators
+forster
+organising
+ui
+banished
+napier
+injustice
+teller
+layered
+thump
+##otti
+roc
+battleships
+evidenced
+fugitive
+sadie
+robotics
+##roud
+equatorial
+geologist
+##iza
+yielding
+##bron
+##sr
+internationale
+mecca
+##diment
+sbs
+skyline
+toad
+uploaded
+reflective
+undrafted
+lal
+leafs
+bayern
+##dai
+lakshmi
+shortlisted
+##stick
+##wicz
+camouflage
+donate
+af
+christi
+lau
+##acio
+disclosed
+nemesis
+1761
+assemble
+straining
+northamptonshire
+tal
+##asi
+bernardino
+premature
+heidi
+42nd
+coefficients
+galactic
+reproduce
+buzzed
+sensations
+zionist
+monsieur
+myrtle
+##eme
+archery
+strangled
+musically
+viewpoint
+antiquities
+bei
+trailers
+seahawks
+cured
+pee
+preferring
+tasmanian
+lange
+sul
+##mail
+##working
+colder
+overland
+lucivar
+massey
+gatherings
+haitian
+##smith
+disapproval
+flaws
+##cco
+##enbach
+1766
+npr
+##icular
+boroughs
+creole
+forums
+techno
+1755
+dent
+abdominal
+streetcar
+##eson
+##stream
+procurement
+gemini
+predictable
+##tya
+acheron
+christoph
+feeder
+fronts
+vendor
+bernhard
+jammu
+tumors
+slang
+##uber
+goaltender
+twists
+curving
+manson
+vuelta
+mer
+peanut
+confessions
+pouch
+unpredictable
+allowance
+theodor
+vascular
+##factory
+bala
+authenticity
+metabolic
+coughing
+nanjing
+##cea
+pembroke
+##bard
+splendid
+36th
+ff
+hourly
+##ahu
+elmer
+handel
+##ivate
+awarding
+thrusting
+dl
+experimentation
+##hesion
+##46
+caressed
+entertained
+steak
+##rangle
+biologist
+orphans
+baroness
+oyster
+stepfather
+##dridge
+mirage
+reefs
+speeding
+##31
+barons
+1764
+227
+inhabit
+preached
+repealed
+##tral
+honoring
+boogie
+captives
+administer
+johanna
+##imate
+gel
+suspiciously
+1767
+sobs
+##dington
+backbone
+hayward
+garry
+##folding
+##nesia
+maxi
+##oof
+##ppe
+ellison
+galileo
+##stand
+crimea
+frenzy
+amour
+bumper
+matrices
+natalia
+baking
+garth
+palestinians
+##grove
+smack
+conveyed
+ensembles
+gardening
+##manship
+##rup
+##stituting
+1640
+harvesting
+topography
+jing
+shifters
+dormitory
+##carriage
+##lston
+ist
+skulls
+##stadt
+dolores
+jewellery
+sarawak
+##wai
+##zier
+fences
+christy
+confinement
+tumbling
+credibility
+fir
+stench
+##bria
+##plication
+##nged
+##sam
+virtues
+##belt
+marjorie
+pba
+##eem
+##made
+celebrates
+schooner
+agitated
+barley
+fulfilling
+anthropologist
+##pro
+restrict
+novi
+regulating
+##nent
+padres
+##rani
+##hesive
+loyola
+tabitha
+milky
+olson
+proprietor
+crambidae
+guarantees
+intercollegiate
+ljubljana
+hilda
+##sko
+ignorant
+hooded
+##lts
+sardinia
+##lidae
+##vation
+frontman
+privileged
+witchcraft
+##gp
+jammed
+laude
+poking
+##than
+bracket
+amazement
+yunnan
+##erus
+maharaja
+linnaeus
+264
+commissioning
+milano
+peacefully
+##logies
+akira
+rani
+regulator
+##36
+grasses
+##rance
+luzon
+crows
+compiler
+gretchen
+seaman
+edouard
+tab
+buccaneers
+ellington
+hamlets
+whig
+socialists
+##anto
+directorial
+easton
+mythological
+##kr
+##vary
+rhineland
+semantic
+taut
+dune
+inventions
+succeeds
+##iter
+replication
+branched
+##pired
+jul
+prosecuted
+kangaroo
+penetrated
+##avian
+middlesbrough
+doses
+bleak
+madam
+predatory
+relentless
+##vili
+reluctance
+##vir
+hailey
+crore
+silvery
+1759
+monstrous
+swimmers
+transmissions
+hawthorn
+informing
+##eral
+toilets
+caracas
+crouch
+kb
+##sett
+295
+cartel
+hadley
+##aling
+alexia
+yvonne
+##biology
+cinderella
+eton
+superb
+blizzard
+stabbing
+industrialist
+maximus
+##gm
+##orus
+groves
+maud
+clade
+oversized
+comedic
+##bella
+rosen
+nomadic
+fulham
+montane
+beverages
+galaxies
+redundant
+swarm
+##rot
+##folia
+##llis
+buckinghamshire
+fen
+bearings
+bahadur
+##rom
+gilles
+phased
+dynamite
+faber
+benoit
+vip
+##ount
+##wd
+booking
+fractured
+tailored
+anya
+spices
+westwood
+cairns
+auditions
+inflammation
+steamed
+##rocity
+##acion
+##urne
+skyla
+thereof
+watford
+torment
+archdeacon
+transforms
+lulu
+demeanor
+fucked
+serge
+##sor
+mckenna
+minas
+entertainer
+##icide
+caress
+originate
+residue
+##sty
+1740
+##ilised
+##org
+beech
+##wana
+subsidies
+##ghton
+emptied
+gladstone
+ru
+firefighters
+voodoo
+##rcle
+het
+nightingale
+tamara
+edmond
+ingredient
+weaknesses
+silhouette
+285
+compatibility
+withdrawing
+hampson
+##mona
+anguish
+giggling
+##mber
+bookstore
+##jiang
+southernmost
+tilting
+##vance
+bai
+economical
+rf
+briefcase
+dreadful
+hinted
+projections
+shattering
+totaling
+##rogate
+analogue
+indicted
+periodical
+fullback
+##dman
+haynes
+##tenberg
+##ffs
+##ishment
+1745
+thirst
+stumble
+penang
+vigorous
+##ddling
+##kor
+##lium
+octave
+##ove
+##enstein
+##inen
+##ones
+siberian
+##uti
+cbn
+repeal
+swaying
+##vington
+khalid
+tanaka
+unicorn
+otago
+plastered
+lobe
+riddle
+##rella
+perch
+##ishing
+croydon
+filtered
+graeme
+tripoli
+##ossa
+crocodile
+##chers
+sufi
+mined
+##tung
+inferno
+lsu
+##phi
+swelled
+utilizes
+£2
+cale
+periodicals
+styx
+hike
+informally
+coop
+lund
+##tidae
+ala
+hen
+qui
+transformations
+disposed
+sheath
+chickens
+##cade
+fitzroy
+sas
+silesia
+unacceptable
+odisha
+1650
+sabrina
+pe
+spokane
+ratios
+athena
+massage
+shen
+dilemma
+##drum
+##riz
+##hul
+corona
+doubtful
+niall
+##pha
+##bino
+fines
+cite
+acknowledging
+bangor
+ballard
+bathurst
+##resh
+huron
+mustered
+alzheimer
+garments
+kinase
+tyre
+warship
+##cp
+flashback
+pulmonary
+braun
+cheat
+kamal
+cyclists
+constructions
+grenades
+ndp
+traveller
+excuses
+stomped
+signalling
+trimmed
+futsal
+mosques
+relevance
+##wine
+wta
+##23
+##vah
+##lter
+hoc
+##riding
+optimistic
+##´s
+deco
+sim
+interacting
+rejecting
+moniker
+waterways
+##ieri
+##oku
+mayors
+gdansk
+outnumbered
+pearls
+##ended
+##hampton
+fairs
+totals
+dominating
+262
+notions
+stairway
+compiling
+pursed
+commodities
+grease
+yeast
+##jong
+carthage
+griffiths
+residual
+amc
+contraction
+laird
+sapphire
+##marine
+##ivated
+amalgamation
+dissolve
+inclination
+lyle
+packaged
+altitudes
+suez
+canons
+graded
+lurched
+narrowing
+boasts
+guise
+wed
+enrico
+##ovsky
+rower
+scarred
+bree
+cub
+iberian
+protagonists
+bargaining
+proposing
+trainers
+voyages
+vans
+fishes
+##aea
+##ivist
+##verance
+encryption
+artworks
+kazan
+sabre
+cleopatra
+hepburn
+rotting
+supremacy
+mecklenburg
+##brate
+burrows
+hazards
+outgoing
+flair
+organizes
+##ctions
+scorpion
+##usions
+boo
+234
+chevalier
+dunedin
+slapping
+##34
+ineligible
+pensions
+##38
+##omic
+manufactures
+emails
+bismarck
+238
+weakening
+blackish
+ding
+mcgee
+quo
+##rling
+northernmost
+xx
+manpower
+greed
+sampson
+clicking
+##ange
+##horpe
+##inations
+##roving
+torre
+##eptive
+##moral
+symbolism
+38th
+asshole
+meritorious
+outfits
+splashed
+biographies
+sprung
+astros
+##tale
+302
+737
+filly
+raoul
+nw
+tokugawa
+linden
+clubhouse
+##apa
+tracts
+romano
+##pio
+putin
+tags
+##note
+chained
+dickson
+gunshot
+moe
+gunn
+rashid
+##tails
+zipper
+##bas
+##nea
+contrasted
+##ply
+##udes
+plum
+pharaoh
+##pile
+aw
+comedies
+ingrid
+sandwiches
+subdivisions
+1100
+mariana
+nokia
+kamen
+hz
+delaney
+veto
+herring
+##words
+possessive
+outlines
+##roup
+siemens
+stairwell
+rc
+gallantry
+messiah
+palais
+yells
+233
+zeppelin
+##dm
+bolivar
+##cede
+smackdown
+mckinley
+##mora
+##yt
+muted
+geologic
+finely
+unitary
+avatar
+hamas
+maynard
+rees
+bog
+contrasting
+##rut
+liv
+chico
+disposition
+pixel
+##erate
+becca
+dmitry
+yeshiva
+narratives
+##lva
+##ulton
+mercenary
+sharpe
+tempered
+navigate
+stealth
+amassed
+keynes
+##lini
+untouched
+##rrie
+havoc
+lithium
+##fighting
+abyss
+graf
+southward
+wolverine
+balloons
+implements
+ngos
+transitions
+##icum
+ambushed
+concacaf
+dormant
+economists
+##dim
+costing
+csi
+rana
+universite
+boulders
+verity
+##llon
+collin
+mellon
+misses
+cypress
+fluorescent
+lifeless
+spence
+##ulla
+crewe
+shepard
+pak
+revelations
+##م
+jolly
+gibbons
+paw
+##dro
+##quel
+freeing
+##test
+shack
+fries
+palatine
+##51
+##hiko
+accompaniment
+cruising
+recycled
+##aver
+erwin
+sorting
+synthesizers
+dyke
+realities
+sg
+strides
+enslaved
+wetland
+##ghan
+competence
+gunpowder
+grassy
+maroon
+reactors
+objection
+##oms
+carlson
+gearbox
+macintosh
+radios
+shelton
+##sho
+clergyman
+prakash
+254
+mongols
+trophies
+oricon
+228
+stimuli
+twenty20
+cantonese
+cortes
+mirrored
+##saurus
+bhp
+cristina
+melancholy
+##lating
+enjoyable
+nuevo
+##wny
+downfall
+schumacher
+##ind
+banging
+lausanne
+rumbled
+paramilitary
+reflex
+ax
+amplitude
+migratory
+##gall
+##ups
+midi
+barnard
+lastly
+sherry
+##hp
+##nall
+keystone
+##kra
+carleton
+slippery
+##53
+coloring
+foe
+socket
+otter
+##rgos
+mats
+##tose
+consultants
+bafta
+bison
+topping
+##km
+490
+primal
+abandonment
+transplant
+atoll
+hideous
+mort
+pained
+reproduced
+tae
+howling
+##turn
+unlawful
+billionaire
+hotter
+poised
+lansing
+##chang
+dinamo
+retro
+messing
+nfc
+domesday
+##mina
+blitz
+timed
+##athing
+##kley
+ascending
+gesturing
+##izations
+signaled
+tis
+chinatown
+mermaid
+savanna
+jameson
+##aint
+catalina
+##pet
+##hers
+cochrane
+cy
+chatting
+##kus
+alerted
+computation
+mused
+noelle
+majestic
+mohawk
+campo
+octagonal
+##sant
+##hend
+241
+aspiring
+##mart
+comprehend
+iona
+paralyzed
+shimmering
+swindon
+rhone
+##eley
+reputed
+configurations
+pitchfork
+agitation
+francais
+gillian
+lipstick
+##ilo
+outsiders
+pontifical
+resisting
+bitterness
+sewer
+rockies
+##edd
+##ucher
+misleading
+1756
+exiting
+galloway
+##nging
+risked
+##heart
+246
+commemoration
+schultz
+##rka
+integrating
+##rsa
+poses
+shrieked
+##weiler
+guineas
+gladys
+jerking
+owls
+goldsmith
+nightly
+penetrating
+##unced
+lia
+##33
+ignited
+betsy
+##aring
+##thorpe
+follower
+vigorously
+##rave
+coded
+kiran
+knit
+zoology
+tbilisi
+##28
+##bered
+repository
+govt
+deciduous
+dino
+growling
+##bba
+enhancement
+unleashed
+chanting
+pussy
+biochemistry
+##eric
+kettle
+repression
+toxicity
+nrhp
+##arth
+##kko
+##bush
+ernesto
+commended
+outspoken
+242
+mca
+parchment
+sms
+kristen
+##aton
+bisexual
+raked
+glamour
+navajo
+a2
+conditioned
+showcased
+##hma
+spacious
+youthful
+##esa
+usl
+appliances
+junta
+brest
+layne
+conglomerate
+enchanted
+chao
+loosened
+picasso
+circulating
+inspect
+montevideo
+##centric
+##kti
+piazza
+spurred
+##aith
+bari
+freedoms
+poultry
+stamford
+lieu
+##ect
+indigo
+sarcastic
+bahia
+stump
+attach
+dvds
+frankenstein
+lille
+approx
+scriptures
+pollen
+##script
+nmi
+overseen
+##ivism
+tides
+proponent
+newmarket
+inherit
+milling
+##erland
+centralized
+##rou
+distributors
+credentials
+drawers
+abbreviation
+##lco
+##xon
+downing
+uncomfortably
+ripe
+##oes
+erase
+franchises
+##ever
+populace
+##bery
+##khar
+decomposition
+pleas
+##tet
+daryl
+sabah
+##stle
+##wide
+fearless
+genie
+lesions
+annette
+##ogist
+oboe
+appendix
+nair
+dripped
+petitioned
+maclean
+mosquito
+parrot
+rpg
+hampered
+1648
+operatic
+reservoirs
+##tham
+irrelevant
+jolt
+summarized
+##fp
+medallion
+##taff
+##−
+clawed
+harlow
+narrower
+goddard
+marcia
+bodied
+fremont
+suarez
+altering
+tempest
+mussolini
+porn
+##isms
+sweetly
+oversees
+walkers
+solitude
+grimly
+shrines
+hk
+ich
+supervisors
+hostess
+dietrich
+legitimacy
+brushes
+expressive
+##yp
+dissipated
+##rse
+localized
+systemic
+##nikov
+gettysburg
+##js
+##uaries
+dialogues
+muttering
+251
+housekeeper
+sicilian
+discouraged
+##frey
+beamed
+kaladin
+halftime
+kidnap
+##amo
+##llet
+1754
+synonymous
+depleted
+instituto
+insulin
+reprised
+##opsis
+clashed
+##ctric
+interrupting
+radcliffe
+insisting
+medici
+1715
+ejected
+playfully
+turbulent
+##47
+starvation
+##rini
+shipment
+rebellious
+petersen
+verification
+merits
+##rified
+cakes
+##charged
+1757
+milford
+shortages
+spying
+fidelity
+##aker
+emitted
+storylines
+harvested
+seismic
+##iform
+cheung
+kilda
+theoretically
+barbie
+lynx
+##rgy
+##tius
+goblin
+mata
+poisonous
+##nburg
+reactive
+residues
+obedience
+##евич
+conjecture
+##rac
+401
+hating
+sixties
+kicker
+moaning
+motown
+##bha
+emancipation
+neoclassical
+##hering
+consoles
+ebert
+professorship
+##tures
+sustaining
+assaults
+obeyed
+affluent
+incurred
+tornadoes
+##eber
+##zow
+emphasizing
+highlanders
+cheated
+helmets
+##ctus
+internship
+terence
+bony
+executions
+legislators
+berries
+peninsular
+tinged
+##aco
+1689
+amplifier
+corvette
+ribbons
+lavish
+pennant
+##lander
+worthless
+##chfield
+##forms
+mariano
+pyrenees
+expenditures
+##icides
+chesterfield
+mandir
+tailor
+39th
+sergey
+nestled
+willed
+aristocracy
+devotees
+goodnight
+raaf
+rumored
+weaponry
+remy
+appropriations
+harcourt
+burr
+riaa
+##lence
+limitation
+unnoticed
+guo
+soaking
+swamps
+##tica
+collapsing
+tatiana
+descriptive
+brigham
+psalm
+##chment
+maddox
+##lization
+patti
+caliph
+##aja
+akron
+injuring
+serra
+##ganj
+basins
+##sari
+astonished
+launcher
+##church
+hilary
+wilkins
+sewing
+##sf
+stinging
+##fia
+##ncia
+underwood
+startup
+##ition
+compilations
+vibrations
+embankment
+jurist
+##nity
+bard
+juventus
+groundwater
+kern
+palaces
+helium
+boca
+cramped
+marissa
+soto
+##worm
+jae
+princely
+##ggy
+faso
+bazaar
+warmly
+##voking
+229
+pairing
+##lite
+##grate
+##nets
+wien
+freaked
+ulysses
+rebirth
+##alia
+##rent
+mummy
+guzman
+jimenez
+stilled
+##nitz
+trajectory
+tha
+woken
+archival
+professions
+##pts
+##pta
+hilly
+shadowy
+shrink
+##bolt
+norwood
+glued
+migrate
+stereotypes
+devoid
+##pheus
+625
+evacuate
+horrors
+infancy
+gotham
+knowles
+optic
+downloaded
+sachs
+kingsley
+parramatta
+darryl
+mor
+##onale
+shady
+commence
+confesses
+kan
+##meter
+##placed
+marlborough
+roundabout
+regents
+frigates
+io
+##imating
+gothenburg
+revoked
+carvings
+clockwise
+convertible
+intruder
+##sche
+banged
+##ogo
+vicky
+bourgeois
+##mony
+dupont
+footing
+##gum
+pd
+##real
+buckle
+yun
+penthouse
+sane
+720
+serviced
+stakeholders
+neumann
+bb
+##eers
+comb
+##gam
+catchment
+pinning
+rallies
+typing
+##elles
+forefront
+freiburg
+sweetie
+giacomo
+widowed
+goodwill
+worshipped
+aspirations
+midday
+##vat
+fishery
+##trick
+bournemouth
+turk
+243
+hearth
+ethanol
+guadalajara
+murmurs
+sl
+##uge
+afforded
+scripted
+##hta
+wah
+##jn
+coroner
+translucent
+252
+memorials
+puck
+progresses
+clumsy
+##race
+315
+candace
+recounted
+##27
+##slin
+##uve
+filtering
+##mac
+howl
+strata
+heron
+leveled
+##ays
+dubious
+##oja
+##т
+##wheel
+citations
+exhibiting
+##laya
+##mics
+##pods
+turkic
+##lberg
+injunction
+##ennial
+##mit
+antibodies
+##44
+organise
+##rigues
+cardiovascular
+cushion
+inverness
+##zquez
+dia
+cocoa
+sibling
+##tman
+##roid
+expanse
+feasible
+tunisian
+algiers
+##relli
+rus
+bloomberg
+dso
+westphalia
+bro
+tacoma
+281
+downloads
+##ours
+konrad
+duran
+##hdi
+continuum
+jett
+compares
+legislator
+secession
+##nable
+##gues
+##zuka
+translating
+reacher
+##gley
+##ła
+aleppo
+##agi
+tc
+orchards
+trapping
+linguist
+versatile
+drumming
+postage
+calhoun
+superiors
+##mx
+barefoot
+leary
+##cis
+ignacio
+alfa
+kaplan
+##rogen
+bratislava
+mori
+##vot
+disturb
+haas
+313
+cartridges
+gilmore
+radiated
+salford
+tunic
+hades
+##ulsive
+archeological
+delilah
+magistrates
+auditioned
+brewster
+charters
+empowerment
+blogs
+cappella
+dynasties
+iroquois
+whipping
+##krishna
+raceway
+truths
+myra
+weaken
+judah
+mcgregor
+##horse
+mic
+refueling
+37th
+burnley
+bosses
+markus
+premio
+query
+##gga
+dunbar
+##economic
+darkest
+lyndon
+sealing
+commendation
+reappeared
+##mun
+addicted
+ezio
+slaughtered
+satisfactory
+shuffle
+##eves
+##thic
+##uj
+fortification
+warrington
+##otto
+resurrected
+fargo
+mane
+##utable
+##lei
+##space
+foreword
+ox
+##aris
+##vern
+abrams
+hua
+##mento
+sakura
+##alo
+uv
+sentimental
+##skaya
+midfield
+##eses
+sturdy
+scrolls
+macleod
+##kyu
+entropy
+##lance
+mitochondrial
+cicero
+excelled
+thinner
+convoys
+perceive
+##oslav
+##urable
+systematically
+grind
+burkina
+287
+##tagram
+ops
+##aman
+guantanamo
+##cloth
+##tite
+forcefully
+wavy
+##jou
+pointless
+##linger
+##tze
+layton
+portico
+superficial
+clerical
+outlaws
+##hism
+burials
+muir
+##inn
+creditors
+hauling
+rattle
+##leg
+calais
+monde
+archers
+reclaimed
+dwell
+wexford
+hellenic
+falsely
+remorse
+##tek
+dough
+furnishings
+##uttered
+gabon
+neurological
+novice
+##igraphy
+contemplated
+pulpit
+nightstand
+saratoga
+##istan
+documenting
+pulsing
+taluk
+##firmed
+busted
+marital
+##rien
+disagreements
+wasps
+##yes
+hodge
+mcdonnell
+mimic
+fran
+pendant
+dhabi
+musa
+##nington
+congratulations
+argent
+darrell
+concussion
+losers
+regrets
+thessaloniki
+reversal
+donaldson
+hardwood
+thence
+achilles
+ritter
+##eran
+demonic
+jurgen
+prophets
+goethe
+eki
+classmate
+buff
+##cking
+yank
+irrational
+##inging
+perished
+seductive
+qur
+sourced
+##crat
+##typic
+mustard
+ravine
+barre
+horizontally
+characterization
+phylogenetic
+boise
+##dit
+##runner
+##tower
+brutally
+intercourse
+seduce
+##bbing
+fay
+ferris
+ogden
+amar
+nik
+unarmed
+##inator
+evaluating
+kyrgyzstan
+sweetness
+##lford
+##oki
+mccormick
+meiji
+notoriety
+stimulate
+disrupt
+figuring
+instructional
+mcgrath
+##zoo
+groundbreaking
+##lto
+flinch
+khorasan
+agrarian
+bengals
+mixer
+radiating
+##sov
+ingram
+pitchers
+nad
+tariff
+##cript
+tata
+##codes
+##emi
+##ungen
+appellate
+lehigh
+##bled
+##giri
+brawl
+duct
+texans
+##ciation
+##ropolis
+skipper
+speculative
+vomit
+doctrines
+stresses
+253
+davy
+graders
+whitehead
+jozef
+timely
+cumulative
+haryana
+paints
+appropriately
+boon
+cactus
+##ales
+##pid
+dow
+legions
+##pit
+perceptions
+1730
+picturesque
+##yse
+periphery
+rune
+wr
+##aha
+celtics
+sentencing
+whoa
+##erin
+confirms
+variance
+425
+moines
+mathews
+spade
+rave
+m1
+fronted
+fx
+blending
+alleging
+reared
+##gl
+237
+##paper
+grassroots
+eroded
+##free
+##physical
+directs
+ordeal
+##sław
+accelerate
+hacker
+rooftop
+##inia
+lev
+buys
+cebu
+devote
+##lce
+specialising
+##ulsion
+choreographed
+repetition
+warehouses
+##ryl
+paisley
+tuscany
+analogy
+sorcerer
+hash
+huts
+shards
+descends
+exclude
+nix
+chaplin
+gaga
+ito
+vane
+##drich
+causeway
+misconduct
+limo
+orchestrated
+glands
+jana
+##kot
+u2
+##mple
+##sons
+branching
+contrasts
+scoop
+longed
+##virus
+chattanooga
+##75
+syrup
+cornerstone
+##tized
+##mind
+##iaceae
+careless
+precedence
+frescoes
+##uet
+chilled
+consult
+modelled
+snatch
+peat
+##thermal
+caucasian
+humane
+relaxation
+spins
+temperance
+##lbert
+occupations
+lambda
+hybrids
+moons
+mp3
+##oese
+247
+rolf
+societal
+yerevan
+ness
+##ssler
+befriended
+mechanized
+nominate
+trough
+boasted
+cues
+seater
+##hom
+bends
+##tangle
+conductors
+emptiness
+##lmer
+eurasian
+adriatic
+tian
+##cie
+anxiously
+lark
+propellers
+chichester
+jock
+ev
+2a
+##holding
+credible
+recounts
+tori
+loyalist
+abduction
+##hoot
+##redo
+nepali
+##mite
+ventral
+tempting
+##ango
+##crats
+steered
+##wice
+javelin
+dipping
+laborers
+prentice
+looming
+titanium
+##ː
+badges
+emir
+tensor
+##ntation
+egyptians
+rash
+denies
+hawthorne
+lombard
+showers
+wehrmacht
+dietary
+trojan
+##reus
+welles
+executing
+horseshoe
+lifeboat
+##lak
+elsa
+infirmary
+nearing
+roberta
+boyer
+mutter
+trillion
+joanne
+##fine
+##oked
+sinks
+vortex
+uruguayan
+clasp
+sirius
+##block
+accelerator
+prohibit
+sunken
+byu
+chronological
+diplomats
+ochreous
+510
+symmetrical
+1644
+maia
+##tology
+salts
+reigns
+atrocities
+##ия
+hess
+bared
+issn
+##vyn
+cater
+saturated
+##cycle
+##isse
+sable
+voyager
+dyer
+yusuf
+##inge
+fountains
+wolff
+##39
+##nni
+engraving
+rollins
+atheist
+ominous
+##ault
+herr
+chariot
+martina
+strung
+##fell
+##farlane
+horrific
+sahib
+gazes
+saetan
+erased
+ptolemy
+##olic
+flushing
+lauderdale
+analytic
+##ices
+530
+navarro
+beak
+gorilla
+herrera
+broom
+guadalupe
+raiding
+sykes
+311
+bsc
+deliveries
+1720
+invasions
+carmichael
+tajikistan
+thematic
+ecumenical
+sentiments
+onstage
+##rians
+##brand
+##sume
+catastrophic
+flanks
+molten
+##arns
+waller
+aimee
+terminating
+##icing
+alternately
+##oche
+nehru
+printers
+outraged
+##eving
+empires
+template
+banners
+repetitive
+za
+##oise
+vegetarian
+##tell
+guiana
+opt
+cavendish
+lucknow
+synthesized
+##hani
+##mada
+finalized
+##ctable
+fictitious
+mayoral
+unreliable
+##enham
+embracing
+peppers
+rbis
+##chio
+##neo
+inhibition
+slashed
+togo
+orderly
+embroidered
+safari
+salty
+236
+barron
+benito
+totaled
+##dak
+pubs
+simulated
+caden
+devin
+tolkien
+momma
+welding
+sesame
+##ept
+gottingen
+hardness
+630
+shaman
+temeraire
+620
+adequately
+pediatric
+##kit
+ck
+assertion
+radicals
+composure
+cadence
+seafood
+beaufort
+lazarus
+mani
+warily
+cunning
+kurdistan
+249
+cantata
+##kir
+ares
+##41
+##clusive
+nape
+townland
+geared
+insulted
+flutter
+boating
+violate
+draper
+dumping
+malmo
+##hh
+##romatic
+firearm
+alta
+bono
+obscured
+##clave
+exceeds
+panorama
+unbelievable
+##train
+preschool
+##essed
+disconnected
+installing
+rescuing
+secretaries
+accessibility
+##castle
+##drive
+##ifice
+##film
+bouts
+slug
+waterway
+mindanao
+##buro
+##ratic
+halves
+##ل
+calming
+liter
+maternity
+adorable
+bragg
+electrification
+mcc
+##dote
+roxy
+schizophrenia
+##body
+munoz
+kaye
+whaling
+239
+mil
+tingling
+tolerant
+##ago
+unconventional
+volcanoes
+##finder
+deportivo
+##llie
+robson
+kaufman
+neuroscience
+wai
+deportation
+masovian
+scraping
+converse
+##bh
+hacking
+bulge
+##oun
+administratively
+yao
+580
+amp
+mammoth
+booster
+claremont
+hooper
+nomenclature
+pursuits
+mclaughlin
+melinda
+##sul
+catfish
+barclay
+substrates
+taxa
+zee
+originals
+kimberly
+packets
+padma
+##ality
+borrowing
+ostensibly
+solvent
+##bri
+##genesis
+##mist
+lukas
+shreveport
+veracruz
+##ь
+##lou
+##wives
+cheney
+tt
+anatolia
+hobbs
+##zyn
+cyclic
+radiant
+alistair
+greenish
+siena
+dat
+independents
+##bation
+conform
+pieter
+hyper
+applicant
+bradshaw
+spores
+telangana
+vinci
+inexpensive
+nuclei
+322
+jang
+nme
+soho
+spd
+##ign
+cradled
+receptionist
+pow
+##43
+##rika
+fascism
+##ifer
+experimenting
+##ading
+##iec
+##region
+345
+jocelyn
+maris
+stair
+nocturnal
+toro
+constabulary
+elgin
+##kker
+msc
+##giving
+##schen
+##rase
+doherty
+doping
+sarcastically
+batter
+maneuvers
+##cano
+##apple
+##gai
+##git
+intrinsic
+##nst
+##stor
+1753
+showtime
+cafes
+gasps
+lviv
+ushered
+##thed
+fours
+restart
+astonishment
+transmitting
+flyer
+shrugs
+##sau
+intriguing
+cones
+dictated
+mushrooms
+medial
+##kovsky
+##elman
+escorting
+gaped
+##26
+godfather
+##door
+##sell
+djs
+recaptured
+timetable
+vila
+1710
+3a
+aerodrome
+mortals
+scientology
+##orne
+angelina
+mag
+convection
+unpaid
+insertion
+intermittent
+lego
+##nated
+endeavor
+kota
+pereira
+##lz
+304
+bwv
+glamorgan
+insults
+agatha
+fey
+##cend
+fleetwood
+mahogany
+protruding
+steamship
+zeta
+##arty
+mcguire
+suspense
+##sphere
+advising
+urges
+##wala
+hurriedly
+meteor
+gilded
+inline
+arroyo
+stalker
+##oge
+excitedly
+revered
+##cure
+earle
+introductory
+##break
+##ilde
+mutants
+puff
+pulses
+reinforcement
+##haling
+curses
+lizards
+stalk
+correlated
+##fixed
+fallout
+macquarie
+##unas
+bearded
+denton
+heaving
+802
+##ocation
+winery
+assign
+dortmund
+##lkirk
+everest
+invariant
+charismatic
+susie
+##elling
+bled
+lesley
+telegram
+sumner
+bk
+##ogen
+##к
+wilcox
+needy
+colbert
+duval
+##iferous
+##mbled
+allotted
+attends
+imperative
+##hita
+replacements
+hawker
+##inda
+insurgency
+##zee
+##eke
+casts
+##yla
+680
+ives
+transitioned
+##pack
+##powering
+authoritative
+baylor
+flex
+cringed
+plaintiffs
+woodrow
+##skie
+drastic
+ape
+aroma
+unfolded
+commotion
+nt
+preoccupied
+theta
+routines
+lasers
+privatization
+wand
+domino
+ek
+clenching
+nsa
+strategically
+showered
+bile
+handkerchief
+pere
+storing
+christophe
+insulting
+316
+nakamura
+romani
+asiatic
+magdalena
+palma
+cruises
+stripping
+405
+konstantin
+soaring
+##berman
+colloquially
+forerunner
+havilland
+incarcerated
+parasites
+sincerity
+##utus
+disks
+plank
+saigon
+##ining
+corbin
+homo
+ornaments
+powerhouse
+##tlement
+chong
+fastened
+feasibility
+idf
+morphological
+usable
+##nish
+##zuki
+aqueduct
+jaguars
+keepers
+##flies
+aleksandr
+faust
+assigns
+ewing
+bacterium
+hurled
+tricky
+hungarians
+integers
+wallis
+321
+yamaha
+##isha
+hushed
+oblivion
+aviator
+evangelist
+friars
+##eller
+monograph
+ode
+##nary
+airplanes
+labourers
+charms
+##nee
+1661
+hagen
+tnt
+rudder
+fiesta
+transcript
+dorothea
+ska
+inhibitor
+maccabi
+retorted
+raining
+encompassed
+clauses
+menacing
+1642
+lineman
+##gist
+vamps
+##ape
+##dick
+gloom
+##rera
+dealings
+easing
+seekers
+##nut
+##pment
+helens
+unmanned
+##anu
+##isson
+basics
+##amy
+##ckman
+adjustments
+1688
+brutality
+horne
+##zell
+sui
+##55
+##mable
+aggregator
+##thal
+rhino
+##drick
+##vira
+counters
+zoom
+##01
+##rting
+mn
+montenegrin
+packard
+##unciation
+##♭
+##kki
+reclaim
+scholastic
+thugs
+pulsed
+##icia
+syriac
+quan
+saddam
+banda
+kobe
+blaming
+buddies
+dissent
+##lusion
+##usia
+corbett
+jaya
+delle
+erratic
+lexie
+##hesis
+435
+amiga
+hermes
+##pressing
+##leen
+chapels
+gospels
+jamal
+##uating
+compute
+revolving
+warp
+##sso
+##thes
+armory
+##eras
+##gol
+antrim
+loki
+##kow
+##asian
+##good
+##zano
+braid
+handwriting
+subdistrict
+funky
+pantheon
+##iculate
+concurrency
+estimation
+improper
+juliana
+##his
+newcomers
+johnstone
+staten
+communicated
+##oco
+##alle
+sausage
+stormy
+##stered
+##tters
+superfamily
+##grade
+acidic
+collateral
+tabloid
+##oped
+##rza
+bladder
+austen
+##ellant
+mcgraw
+##hay
+hannibal
+mein
+aquino
+lucifer
+wo
+badger
+boar
+cher
+christensen
+greenberg
+interruption
+##kken
+jem
+244
+mocked
+bottoms
+cambridgeshire
+##lide
+sprawling
+##bbly
+eastwood
+ghent
+synth
+##buck
+advisers
+##bah
+nominally
+hapoel
+qu
+daggers
+estranged
+fabricated
+towels
+vinnie
+wcw
+misunderstanding
+anglia
+nothin
+unmistakable
+##dust
+##lova
+chilly
+marquette
+truss
+##edge
+##erine
+reece
+##lty
+##chemist
+##connected
+272
+308
+41st
+bash
+raion
+waterfalls
+##ump
+##main
+labyrinth
+queue
+theorist
+##istle
+bharatiya
+flexed
+soundtracks
+rooney
+leftist
+patrolling
+wharton
+plainly
+alleviate
+eastman
+schuster
+topographic
+engages
+immensely
+unbearable
+fairchild
+1620
+dona
+lurking
+parisian
+oliveira
+ia
+indictment
+hahn
+bangladeshi
+##aster
+vivo
+##uming
+##ential
+antonia
+expects
+indoors
+kildare
+harlan
+##logue
+##ogenic
+##sities
+forgiven
+##wat
+childish
+tavi
+##mide
+##orra
+plausible
+grimm
+successively
+scooted
+##bola
+##dget
+##rith
+spartans
+emery
+flatly
+azure
+epilogue
+##wark
+flourish
+##iny
+##tracted
+##overs
+##oshi
+bestseller
+distressed
+receipt
+spitting
+hermit
+topological
+##cot
+drilled
+subunit
+francs
+##layer
+eel
+##fk
+##itas
+octopus
+footprint
+petitions
+ufo
+##say
+##foil
+interfering
+leaking
+palo
+##metry
+thistle
+valiant
+##pic
+narayan
+mcpherson
+##fast
+gonzales
+##ym
+##enne
+dustin
+novgorod
+solos
+##zman
+doin
+##raph
+##patient
+##meyer
+soluble
+ashland
+cuffs
+carole
+pendleton
+whistling
+vassal
+##river
+deviation
+revisited
+constituents
+rallied
+rotate
+loomed
+##eil
+##nting
+amateurs
+augsburg
+auschwitz
+crowns
+skeletons
+##cona
+bonnet
+257
+dummy
+globalization
+simeon
+sleeper
+mandal
+differentiated
+##crow
+##mare
+milne
+bundled
+exasperated
+talmud
+owes
+segregated
+##feng
+##uary
+dentist
+piracy
+props
+##rang
+devlin
+##torium
+malicious
+paws
+##laid
+dependency
+##ergy
+##fers
+##enna
+258
+pistons
+rourke
+jed
+grammatical
+tres
+maha
+wig
+512
+ghostly
+jayne
+##achal
+##creen
+##ilis
+##lins
+##rence
+designate
+##with
+arrogance
+cambodian
+clones
+showdown
+throttle
+twain
+##ception
+lobes
+metz
+nagoya
+335
+braking
+##furt
+385
+roaming
+##minster
+amin
+crippled
+##37
+##llary
+indifferent
+hoffmann
+idols
+intimidating
+1751
+261
+influenza
+memo
+onions
+1748
+bandage
+consciously
+##landa
+##rage
+clandestine
+observes
+swiped
+tangle
+##ener
+##jected
+##trum
+##bill
+##lta
+hugs
+congresses
+josiah
+spirited
+##dek
+humanist
+managerial
+filmmaking
+inmate
+rhymes
+debuting
+grimsby
+ur
+##laze
+duplicate
+vigor
+##tf
+republished
+bolshevik
+refurbishment
+antibiotics
+martini
+methane
+newscasts
+royale
+horizons
+levant
+iain
+visas
+##ischen
+paler
+##around
+manifestation
+snuck
+alf
+chop
+futile
+pedestal
+rehab
+##kat
+bmg
+kerman
+res
+fairbanks
+jarrett
+abstraction
+saharan
+##zek
+1746
+procedural
+clearer
+kincaid
+sash
+luciano
+##ffey
+crunch
+helmut
+##vara
+revolutionaries
+##tute
+creamy
+leach
+##mmon
+1747
+permitting
+nes
+plight
+wendell
+##lese
+contra
+ts
+clancy
+ipa
+mach
+staples
+autopsy
+disturbances
+nueva
+karin
+pontiac
+##uding
+proxy
+venerable
+haunt
+leto
+bergman
+expands
+##helm
+wal
+##pipe
+canning
+celine
+cords
+obesity
+##enary
+intrusion
+planner
+##phate
+reasoned
+sequencing
+307
+harrow
+##chon
+##dora
+marred
+mcintyre
+repay
+tarzan
+darting
+248
+harrisburg
+margarita
+repulsed
+##hur
+##lding
+belinda
+hamburger
+novo
+compliant
+runways
+bingham
+registrar
+skyscraper
+ic
+cuthbert
+improvisation
+livelihood
+##corp
+##elial
+admiring
+##dened
+sporadic
+believer
+casablanca
+popcorn
+##29
+asha
+shovel
+##bek
+##dice
+coiled
+tangible
+##dez
+casper
+elsie
+resin
+tenderness
+rectory
+##ivision
+avail
+sonar
+##mori
+boutique
+##dier
+guerre
+bathed
+upbringing
+vaulted
+sandals
+blessings
+##naut
+##utnant
+1680
+306
+foxes
+pia
+corrosion
+hesitantly
+confederates
+crystalline
+footprints
+shapiro
+tirana
+valentin
+drones
+45th
+microscope
+shipments
+texted
+inquisition
+wry
+guernsey
+unauthorized
+resigning
+760
+ripple
+schubert
+stu
+reassure
+felony
+##ardo
+brittle
+koreans
+##havan
+##ives
+dun
+implicit
+tyres
+##aldi
+##lth
+magnolia
+##ehan
+##puri
+##poulos
+aggressively
+fei
+gr
+familiarity
+##poo
+indicative
+##trust
+fundamentally
+jimmie
+overrun
+395
+anchors
+moans
+##opus
+britannia
+armagh
+##ggle
+purposely
+seizing
+##vao
+bewildered
+mundane
+avoidance
+cosmopolitan
+geometridae
+quartermaster
+caf
+415
+chatter
+engulfed
+gleam
+purge
+##icate
+juliette
+jurisprudence
+guerra
+revisions
+##bn
+casimir
+brew
+##jm
+1749
+clapton
+cloudy
+conde
+hermitage
+278
+simulations
+torches
+vincenzo
+matteo
+##rill
+hidalgo
+booming
+westbound
+accomplishment
+tentacles
+unaffected
+##sius
+annabelle
+flopped
+sloping
+##litz
+dreamer
+interceptor
+vu
+##loh
+consecration
+copying
+messaging
+breaker
+climates
+hospitalized
+1752
+torino
+afternoons
+winfield
+witnessing
+##teacher
+breakers
+choirs
+sawmill
+coldly
+##ege
+sipping
+haste
+uninhabited
+conical
+bibliography
+pamphlets
+severn
+edict
+##oca
+deux
+illnesses
+grips
+##pl
+rehearsals
+sis
+thinkers
+tame
+##keepers
+1690
+acacia
+reformer
+##osed
+##rys
+shuffling
+##iring
+##shima
+eastbound
+ionic
+rhea
+flees
+littered
+##oum
+rocker
+vomiting
+groaning
+champ
+overwhelmingly
+civilizations
+paces
+sloop
+adoptive
+##tish
+skaters
+##vres
+aiding
+mango
+##joy
+nikola
+shriek
+##ignon
+pharmaceuticals
+##mg
+tuna
+calvert
+gustavo
+stocked
+yearbook
+##urai
+##mana
+computed
+subsp
+riff
+hanoi
+kelvin
+hamid
+moors
+pastures
+summons
+jihad
+nectar
+##ctors
+bayou
+untitled
+pleasing
+vastly
+republics
+intellect
+##η
+##ulio
+##tou
+crumbling
+stylistic
+sb
+##ی
+consolation
+frequented
+h₂o
+walden
+widows
+##iens
+404
+##ignment
+chunks
+improves
+288
+grit
+recited
+##dev
+snarl
+sociological
+##arte
+##gul
+inquired
+##held
+bruise
+clube
+consultancy
+homogeneous
+hornets
+multiplication
+pasta
+prick
+savior
+##grin
+##kou
+##phile
+yoon
+##gara
+grimes
+vanishing
+cheering
+reacting
+bn
+distillery
+##quisite
+##vity
+coe
+dockyard
+massif
+##jord
+escorts
+voss
+##valent
+byte
+chopped
+hawke
+illusions
+workings
+floats
+##koto
+##vac
+kv
+annapolis
+madden
+##onus
+alvaro
+noctuidae
+##cum
+##scopic
+avenge
+steamboat
+forte
+illustrates
+erika
+##trip
+570
+dew
+nationalities
+bran
+manifested
+thirsty
+diversified
+muscled
+reborn
+##standing
+arson
+##lessness
+##dran
+##logram
+##boys
+##kushima
+##vious
+willoughby
+##phobia
+286
+alsace
+dashboard
+yuki
+##chai
+granville
+myspace
+publicized
+tricked
+##gang
+adjective
+##ater
+relic
+reorganisation
+enthusiastically
+indications
+saxe
+##lassified
+consolidate
+iec
+padua
+helplessly
+ramps
+renaming
+regulars
+pedestrians
+accents
+convicts
+inaccurate
+lowers
+mana
+##pati
+barrie
+bjp
+outta
+someplace
+berwick
+flanking
+invoked
+marrow
+sparsely
+excerpts
+clothed
+rei
+##ginal
+wept
+##straße
+##vish
+alexa
+excel
+##ptive
+membranes
+aquitaine
+creeks
+cutler
+sheppard
+implementations
+ns
+##dur
+fragrance
+budge
+concordia
+magnesium
+marcelo
+##antes
+gladly
+vibrating
+##rral
+##ggles
+montrose
+##omba
+lew
+seamus
+1630
+cocky
+##ament
+##uen
+bjorn
+##rrick
+fielder
+fluttering
+##lase
+methyl
+kimberley
+mcdowell
+reductions
+barbed
+##jic
+##tonic
+aeronautical
+condensed
+distracting
+##promising
+huffed
+##cala
+##sle
+claudius
+invincible
+missy
+pious
+balthazar
+ci
+##lang
+butte
+combo
+orson
+##dication
+myriad
+1707
+silenced
+##fed
+##rh
+coco
+netball
+yourselves
+##oza
+clarify
+heller
+peg
+durban
+etudes
+offender
+roast
+blackmail
+curvature
+##woods
+vile
+309
+illicit
+suriname
+##linson
+overture
+1685
+bubbling
+gymnast
+tucking
+##mming
+##ouin
+maldives
+##bala
+gurney
+##dda
+##eased
+##oides
+backside
+pinto
+jars
+racehorse
+tending
+##rdial
+baronetcy
+wiener
+duly
+##rke
+barbarian
+cupping
+flawed
+##thesis
+bertha
+pleistocene
+puddle
+swearing
+##nob
+##tically
+fleeting
+prostate
+amulet
+educating
+##mined
+##iti
+##tler
+75th
+jens
+respondents
+analytics
+cavaliers
+papacy
+raju
+##iente
+##ulum
+##tip
+funnel
+271
+disneyland
+##lley
+sociologist
+##iam
+2500
+faulkner
+louvre
+menon
+##dson
+276
+##ower
+afterlife
+mannheim
+peptide
+referees
+comedians
+meaningless
+##anger
+##laise
+fabrics
+hurley
+renal
+sleeps
+##bour
+##icle
+breakout
+kristin
+roadside
+animator
+clover
+disdain
+unsafe
+redesign
+##urity
+firth
+barnsley
+portage
+reset
+narrows
+268
+commandos
+expansive
+speechless
+tubular
+##lux
+essendon
+eyelashes
+smashwords
+##yad
+##bang
+##claim
+craved
+sprinted
+chet
+somme
+astor
+wrocław
+orton
+266
+bane
+##erving
+##uing
+mischief
+##amps
+##sund
+scaling
+terre
+##xious
+impairment
+offenses
+undermine
+moi
+soy
+contiguous
+arcadia
+inuit
+seam
+##tops
+macbeth
+rebelled
+##icative
+##iot
+590
+elaborated
+frs
+uniformed
+##dberg
+259
+powerless
+priscilla
+stimulated
+980
+qc
+arboretum
+frustrating
+trieste
+bullock
+##nified
+enriched
+glistening
+intern
+##adia
+locus
+nouvelle
+ollie
+ike
+lash
+starboard
+ee
+tapestry
+headlined
+hove
+rigged
+##vite
+pollock
+##yme
+thrive
+clustered
+cas
+roi
+gleamed
+olympiad
+##lino
+pressured
+regimes
+##hosis
+##lick
+ripley
+##ophone
+kickoff
+gallon
+rockwell
+##arable
+crusader
+glue
+revolutions
+scrambling
+1714
+grover
+##jure
+englishman
+aztec
+263
+contemplating
+coven
+ipad
+preach
+triumphant
+tufts
+##esian
+rotational
+##phus
+328
+falkland
+##brates
+strewn
+clarissa
+rejoin
+environmentally
+glint
+banded
+drenched
+moat
+albanians
+johor
+rr
+maestro
+malley
+nouveau
+shaded
+taxonomy
+v6
+adhere
+bunk
+airfields
+##ritan
+1741
+encompass
+remington
+tran
+##erative
+amelie
+mazda
+friar
+morals
+passions
+##zai
+breadth
+vis
+##hae
+argus
+burnham
+caressing
+insider
+rudd
+##imov
+##mini
+##rso
+italianate
+murderous
+textual
+wainwright
+armada
+bam
+weave
+timer
+##taken
+##nh
+fra
+##crest
+ardent
+salazar
+taps
+tunis
+##ntino
+allegro
+gland
+philanthropic
+##chester
+implication
+##optera
+esq
+judas
+noticeably
+wynn
+##dara
+inched
+indexed
+crises
+villiers
+bandit
+royalties
+patterned
+cupboard
+interspersed
+accessory
+isla
+kendrick
+entourage
+stitches
+##esthesia
+headwaters
+##ior
+interlude
+distraught
+draught
+1727
+##basket
+biased
+sy
+transient
+triad
+subgenus
+adapting
+kidd
+shortstop
+##umatic
+dimly
+spiked
+mcleod
+reprint
+nellie
+pretoria
+windmill
+##cek
+singled
+##mps
+273
+reunite
+##orous
+747
+bankers
+outlying
+##omp
+##ports
+##tream
+apologies
+cosmetics
+patsy
+##deh
+##ocks
+##yson
+bender
+nantes
+serene
+##nad
+lucha
+mmm
+323
+##cius
+##gli
+cmll
+coinage
+nestor
+juarez
+##rook
+smeared
+sprayed
+twitching
+sterile
+irina
+embodied
+juveniles
+enveloped
+miscellaneous
+cancers
+dq
+gulped
+luisa
+crested
+swat
+donegal
+ref
+##anov
+##acker
+hearst
+mercantile
+##lika
+doorbell
+ua
+vicki
+##alla
+##som
+bilbao
+psychologists
+stryker
+sw
+horsemen
+turkmenistan
+wits
+##national
+anson
+mathew
+screenings
+##umb
+rihanna
+##agne
+##nessy
+aisles
+##iani
+##osphere
+hines
+kenton
+saskatoon
+tasha
+truncated
+##champ
+##itan
+mildred
+advises
+fredrik
+interpreting
+inhibitors
+##athi
+spectroscopy
+##hab
+##kong
+karim
+panda
+##oia
+##nail
+##vc
+conqueror
+kgb
+leukemia
+##dity
+arrivals
+cheered
+pisa
+phosphorus
+shielded
+##riated
+mammal
+unitarian
+urgently
+chopin
+sanitary
+##mission
+spicy
+drugged
+hinges
+##tort
+tipping
+trier
+impoverished
+westchester
+##caster
+267
+epoch
+nonstop
+##gman
+##khov
+aromatic
+centrally
+cerro
+##tively
+##vio
+billions
+modulation
+sedimentary
+283
+facilitating
+outrageous
+goldstein
+##eak
+##kt
+ld
+maitland
+penultimate
+pollard
+##dance
+fleets
+spaceship
+vertebrae
+##nig
+alcoholism
+als
+recital
+##bham
+##ference
+##omics
+m2
+##bm
+trois
+##tropical
+##в
+commemorates
+##meric
+marge
+##raction
+1643
+670
+cosmetic
+ravaged
+##ige
+catastrophe
+eng
+##shida
+albrecht
+arterial
+bellamy
+decor
+harmon
+##rde
+bulbs
+synchronized
+vito
+easiest
+shetland
+shielding
+wnba
+##glers
+##ssar
+##riam
+brianna
+cumbria
+##aceous
+##rard
+cores
+thayer
+##nsk
+brood
+hilltop
+luminous
+carts
+keynote
+larkin
+logos
+##cta
+##ا
+##mund
+##quay
+lilith
+tinted
+277
+wrestle
+mobilization
+##uses
+sequential
+siam
+bloomfield
+takahashi
+274
+##ieving
+presenters
+ringo
+blazed
+witty
+##oven
+##ignant
+devastation
+haydn
+harmed
+newt
+therese
+##peed
+gershwin
+molina
+rabbis
+sudanese
+001
+innate
+restarted
+##sack
+##fus
+slices
+wb
+##shah
+enroll
+hypothetical
+hysterical
+1743
+fabio
+indefinite
+warped
+##hg
+exchanging
+525
+unsuitable
+##sboro
+gallo
+1603
+bret
+cobalt
+homemade
+##hunter
+mx
+operatives
+##dhar
+terraces
+durable
+latch
+pens
+whorls
+##ctuated
+##eaux
+billing
+ligament
+succumbed
+##gly
+regulators
+spawn
+##brick
+##stead
+filmfare
+rochelle
+##nzo
+1725
+circumstance
+saber
+supplements
+##nsky
+##tson
+crowe
+wellesley
+carrot
+##9th
+##movable
+primate
+drury
+sincerely
+topical
+##mad
+##rao
+callahan
+kyiv
+smarter
+tits
+undo
+##yeh
+announcements
+anthologies
+barrio
+nebula
+##islaus
+##shaft
+##tyn
+bodyguards
+2021
+assassinate
+barns
+emmett
+scully
+##mah
+##yd
+##eland
+##tino
+##itarian
+demoted
+gorman
+lashed
+prized
+adventist
+writ
+##gui
+alla
+invertebrates
+##ausen
+1641
+amman
+1742
+align
+healy
+redistribution
+##gf
+##rize
+insulation
+##drop
+adherents
+hezbollah
+vitro
+ferns
+yanking
+269
+php
+registering
+uppsala
+cheerleading
+confines
+mischievous
+tully
+##ross
+49th
+docked
+roam
+stipulated
+pumpkin
+##bry
+prompt
+##ezer
+blindly
+shuddering
+craftsmen
+frail
+scented
+katharine
+scramble
+shaggy
+sponge
+helix
+zaragoza
+279
+##52
+43rd
+backlash
+fontaine
+seizures
+posse
+cowan
+nonfiction
+telenovela
+wwii
+hammered
+undone
+##gpur
+encircled
+irs
+##ivation
+artefacts
+oneself
+searing
+smallpox
+##belle
+##osaurus
+shandong
+breached
+upland
+blushing
+rankin
+infinitely
+psyche
+tolerated
+docking
+evicted
+##col
+unmarked
+##lving
+gnome
+lettering
+litres
+musique
+##oint
+benevolent
+##jal
+blackened
+##anna
+mccall
+racers
+tingle
+##ocene
+##orestation
+introductions
+radically
+292
+##hiff
+##باد
+1610
+1739
+munchen
+plead
+##nka
+condo
+scissors
+##sight
+##tens
+apprehension
+##cey
+##yin
+hallmark
+watering
+formulas
+sequels
+##llas
+aggravated
+bae
+commencing
+##building
+enfield
+prohibits
+marne
+vedic
+civilized
+euclidean
+jagger
+beforehand
+blasts
+dumont
+##arney
+##nem
+740
+conversions
+hierarchical
+rios
+simulator
+##dya
+##lellan
+hedges
+oleg
+thrusts
+shadowed
+darby
+maximize
+1744
+gregorian
+##nded
+##routed
+sham
+unspecified
+##hog
+emory
+factual
+##smo
+##tp
+fooled
+##rger
+ortega
+wellness
+marlon
+##oton
+##urance
+casket
+keating
+ley
+enclave
+##ayan
+char
+influencing
+jia
+##chenko
+412
+ammonia
+erebidae
+incompatible
+violins
+cornered
+##arat
+grooves
+astronauts
+columbian
+rampant
+fabrication
+kyushu
+mahmud
+vanish
+##dern
+mesopotamia
+##lete
+ict
+##rgen
+caspian
+kenji
+pitted
+##vered
+999
+grimace
+roanoke
+tchaikovsky
+twinned
+##analysis
+##awan
+xinjiang
+arias
+clemson
+kazakh
+sizable
+1662
+##khand
+##vard
+plunge
+tatum
+vittorio
+##nden
+cholera
+##dana
+##oper
+bracing
+indifference
+projectile
+superliga
+##chee
+realises
+upgrading
+299
+porte
+retribution
+##vies
+nk
+stil
+##resses
+ama
+bureaucracy
+blackberry
+bosch
+testosterone
+collapses
+greer
+##pathic
+ioc
+fifties
+malls
+##erved
+bao
+baskets
+adolescents
+siegfried
+##osity
+##tosis
+mantra
+detecting
+existent
+fledgling
+##cchi
+dissatisfied
+gan
+telecommunication
+mingled
+sobbed
+6000
+controversies
+outdated
+taxis
+##raus
+fright
+slams
+##lham
+##fect
+##tten
+detectors
+fetal
+tanned
+##uw
+fray
+goth
+olympian
+skipping
+mandates
+scratches
+sheng
+unspoken
+hyundai
+tracey
+hotspur
+restrictive
+##buch
+americana
+mundo
+##bari
+burroughs
+diva
+vulcan
+##6th
+distinctions
+thumping
+##ngen
+mikey
+sheds
+fide
+rescues
+springsteen
+vested
+valuation
+##ece
+##ely
+pinnacle
+rake
+sylvie
+##edo
+almond
+quivering
+##irus
+alteration
+faltered
+##wad
+51st
+hydra
+ticked
+##kato
+recommends
+##dicated
+antigua
+arjun
+stagecoach
+wilfred
+trickle
+pronouns
+##pon
+aryan
+nighttime
+##anian
+gall
+pea
+stitch
+##hei
+leung
+milos
+##dini
+eritrea
+nexus
+starved
+snowfall
+kant
+parasitic
+cot
+discus
+hana
+strikers
+appleton
+kitchens
+##erina
+##partisan
+##itha
+##vius
+disclose
+metis
+##channel
+1701
+tesla
+##vera
+fitch
+1735
+blooded
+##tila
+decimal
+##tang
+##bai
+cyclones
+eun
+bottled
+peas
+pensacola
+basha
+bolivian
+crabs
+boil
+lanterns
+partridge
+roofed
+1645
+necks
+##phila
+opined
+patting
+##kla
+##lland
+chuckles
+volta
+whereupon
+##nche
+devout
+euroleague
+suicidal
+##dee
+inherently
+involuntary
+knitting
+nasser
+##hide
+puppets
+colourful
+courageous
+southend
+stills
+miraculous
+hodgson
+richer
+rochdale
+ethernet
+greta
+uniting
+prism
+umm
+##haya
+##itical
+##utation
+deterioration
+pointe
+prowess
+##ropriation
+lids
+scranton
+billings
+subcontinent
+##koff
+##scope
+brute
+kellogg
+psalms
+degraded
+##vez
+stanisław
+##ructured
+ferreira
+pun
+astonishing
+gunnar
+##yat
+arya
+prc
+gottfried
+##tight
+excursion
+##ographer
+dina
+##quil
+##nare
+huffington
+illustrious
+wilbur
+gundam
+verandah
+##zard
+naacp
+##odle
+constructive
+fjord
+kade
+##naud
+generosity
+thrilling
+baseline
+cayman
+frankish
+plastics
+accommodations
+zoological
+##fting
+cedric
+qb
+motorized
+##dome
+##otted
+squealed
+tackled
+canucks
+budgets
+situ
+asthma
+dail
+gabled
+grasslands
+whimpered
+writhing
+judgments
+##65
+minnie
+pv
+##carbon
+bananas
+grille
+domes
+monique
+odin
+maguire
+markham
+tierney
+##estra
+##chua
+libel
+poke
+speedy
+atrium
+laval
+notwithstanding
+##edly
+fai
+kala
+##sur
+robb
+##sma
+listings
+luz
+supplementary
+tianjin
+##acing
+enzo
+jd
+ric
+scanner
+croats
+transcribed
+##49
+arden
+cv
+##hair
+##raphy
+##lver
+##uy
+357
+seventies
+staggering
+alam
+horticultural
+hs
+regression
+timbers
+blasting
+##ounded
+montagu
+manipulating
+##cit
+catalytic
+1550
+troopers
+##meo
+condemnation
+fitzpatrick
+##oire
+##roved
+inexperienced
+1670
+castes
+##lative
+outing
+314
+dubois
+flicking
+quarrel
+ste
+learners
+1625
+iq
+whistled
+##class
+282
+classify
+tariffs
+temperament
+355
+folly
+liszt
+##yles
+immersed
+jordanian
+ceasefire
+apparel
+extras
+maru
+fished
+##bio
+harta
+stockport
+assortment
+craftsman
+paralysis
+transmitters
+##cola
+blindness
+##wk
+fatally
+proficiency
+solemnly
+##orno
+repairing
+amore
+groceries
+ultraviolet
+##chase
+schoolhouse
+##tua
+resurgence
+nailed
+##otype
+##×
+ruse
+saliva
+diagrams
+##tructing
+albans
+rann
+thirties
+1b
+antennas
+hilarious
+cougars
+paddington
+stats
+##eger
+breakaway
+ipod
+reza
+authorship
+prohibiting
+scoffed
+##etz
+##ttle
+conscription
+defected
+trondheim
+##fires
+ivanov
+keenan
+##adan
+##ciful
+##fb
+##slow
+locating
+##ials
+##tford
+cadiz
+basalt
+blankly
+interned
+rags
+rattling
+##tick
+carpathian
+reassured
+sync
+bum
+guildford
+iss
+staunch
+##onga
+astronomers
+sera
+sofie
+emergencies
+susquehanna
+##heard
+duc
+mastery
+vh1
+williamsburg
+bayer
+buckled
+craving
+##khan
+##rdes
+bloomington
+##write
+alton
+barbecue
+##bians
+justine
+##hri
+##ndt
+delightful
+smartphone
+newtown
+photon
+retrieval
+peugeot
+hissing
+##monium
+##orough
+flavors
+lighted
+relaunched
+tainted
+##games
+##lysis
+anarchy
+microscopic
+hopping
+adept
+evade
+evie
+##beau
+inhibit
+sinn
+adjustable
+hurst
+intuition
+wilton
+cisco
+44th
+lawful
+lowlands
+stockings
+thierry
+##dalen
+##hila
+##nai
+fates
+prank
+tb
+maison
+lobbied
+provocative
+1724
+4a
+utopia
+##qual
+carbonate
+gujarati
+purcell
+##rford
+curtiss
+##mei
+overgrown
+arenas
+mediation
+swallows
+##rnik
+respectful
+turnbull
+##hedron
+##hope
+alyssa
+ozone
+##ʻi
+ami
+gestapo
+johansson
+snooker
+canteen
+cuff
+declines
+empathy
+stigma
+##ags
+##iner
+##raine
+taxpayers
+gui
+volga
+##wright
+##copic
+lifespan
+overcame
+tattooed
+enactment
+giggles
+##ador
+##camp
+barrington
+bribe
+obligatory
+orbiting
+peng
+##enas
+elusive
+sucker
+##vating
+cong
+hardship
+empowered
+anticipating
+estrada
+cryptic
+greasy
+detainees
+planck
+sudbury
+plaid
+dod
+marriott
+kayla
+##ears
+##vb
+##zd
+mortally
+##hein
+cognition
+radha
+319
+liechtenstein
+meade
+richly
+argyle
+harpsichord
+liberalism
+trumpets
+lauded
+tyrant
+salsa
+tiled
+lear
+promoters
+reused
+slicing
+trident
+##chuk
+##gami
+##lka
+cantor
+checkpoint
+##points
+gaul
+leger
+mammalian
+##tov
+##aar
+##schaft
+doha
+frenchman
+nirvana
+##vino
+delgado
+headlining
+##eron
+##iography
+jug
+tko
+1649
+naga
+intersections
+##jia
+benfica
+nawab
+##suka
+ashford
+gulp
+##deck
+##vill
+##rug
+brentford
+frazier
+pleasures
+dunne
+potsdam
+shenzhen
+dentistry
+##tec
+flanagan
+##dorff
+##hear
+chorale
+dinah
+prem
+quezon
+##rogated
+relinquished
+sutra
+terri
+##pani
+flaps
+##rissa
+poly
+##rnet
+homme
+aback
+##eki
+linger
+womb
+##kson
+##lewood
+doorstep
+orthodoxy
+threaded
+westfield
+##rval
+dioceses
+fridays
+subsided
+##gata
+loyalists
+##biotic
+##ettes
+letterman
+lunatic
+prelate
+tenderly
+invariably
+souza
+thug
+winslow
+##otide
+furlongs
+gogh
+jeopardy
+##runa
+pegasus
+##umble
+humiliated
+standalone
+tagged
+##roller
+freshmen
+klan
+##bright
+attaining
+initiating
+transatlantic
+logged
+viz
+##uance
+1723
+combatants
+intervening
+stephane
+chieftain
+despised
+grazed
+317
+cdc
+galveston
+godzilla
+macro
+simulate
+##planes
+parades
+##esses
+960
+##ductive
+##unes
+equator
+overdose
+##cans
+##hosh
+##lifting
+joshi
+epstein
+sonora
+treacherous
+aquatics
+manchu
+responsive
+##sation
+supervisory
+##christ
+##llins
+##ibar
+##balance
+##uso
+kimball
+karlsruhe
+mab
+##emy
+ignores
+phonetic
+reuters
+spaghetti
+820
+almighty
+danzig
+rumbling
+tombstone
+designations
+lured
+outset
+##felt
+supermarkets
+##wt
+grupo
+kei
+kraft
+susanna
+##blood
+comprehension
+genealogy
+##aghan
+##verted
+redding
+##ythe
+1722
+bowing
+##pore
+##roi
+lest
+sharpened
+fulbright
+valkyrie
+sikhs
+##unds
+swans
+bouquet
+merritt
+##tage
+##venting
+commuted
+redhead
+clerks
+leasing
+cesare
+dea
+hazy
+##vances
+fledged
+greenfield
+servicemen
+##gical
+armando
+blackout
+dt
+sagged
+downloadable
+intra
+potion
+pods
+##4th
+##mism
+xp
+attendants
+gambia
+stale
+##ntine
+plump
+asteroids
+rediscovered
+buds
+flea
+hive
+##neas
+1737
+classifications
+debuts
+##eles
+olympus
+scala
+##eurs
+##gno
+##mute
+hummed
+sigismund
+visuals
+wiggled
+await
+pilasters
+clench
+sulfate
+##ances
+bellevue
+enigma
+trainee
+snort
+##sw
+clouded
+denim
+##rank
+##rder
+churning
+hartman
+lodges
+riches
+sima
+##missible
+accountable
+socrates
+regulates
+mueller
+##cr
+1702
+avoids
+solids
+himalayas
+nutrient
+pup
+##jevic
+squat
+fades
+nec
+##lates
+##pina
+##rona
+##ου
+privateer
+tequila
+##gative
+##mpton
+apt
+hornet
+immortals
+##dou
+asturias
+cleansing
+dario
+##rries
+##anta
+etymology
+servicing
+zhejiang
+##venor
+##nx
+horned
+erasmus
+rayon
+relocating
+£10
+##bags
+escalated
+promenade
+stubble
+2010s
+artisans
+axial
+liquids
+mora
+sho
+yoo
+##tsky
+bundles
+oldies
+##nally
+notification
+bastion
+##ths
+sparkle
+##lved
+1728
+leash
+pathogen
+highs
+##hmi
+immature
+880
+gonzaga
+ignatius
+mansions
+monterrey
+sweets
+bryson
+##loe
+polled
+regatta
+brightest
+pei
+rosy
+squid
+hatfield
+payroll
+addict
+meath
+cornerback
+heaviest
+lodging
+##mage
+capcom
+rippled
+##sily
+barnet
+mayhem
+ymca
+snuggled
+rousseau
+##cute
+blanchard
+284
+fragmented
+leighton
+chromosomes
+risking
+##md
+##strel
+##utter
+corinne
+coyotes
+cynical
+hiroshi
+yeomanry
+##ractive
+ebook
+grading
+mandela
+plume
+agustin
+magdalene
+##rkin
+bea
+femme
+trafford
+##coll
+##lun
+##tance
+52nd
+fourier
+upton
+##mental
+camilla
+gust
+iihf
+islamabad
+longevity
+##kala
+feldman
+netting
+##rization
+endeavour
+foraging
+mfa
+orr
+##open
+greyish
+contradiction
+graz
+##ruff
+handicapped
+marlene
+tweed
+oaxaca
+spp
+campos
+miocene
+pri
+configured
+cooks
+pluto
+cozy
+pornographic
+##entes
+70th
+fairness
+glided
+jonny
+lynne
+rounding
+sired
+##emon
+##nist
+remade
+uncover
+##mack
+complied
+lei
+newsweek
+##jured
+##parts
+##enting
+##pg
+293
+finer
+guerrillas
+athenian
+deng
+disused
+stepmother
+accuse
+gingerly
+seduction
+521
+confronting
+##walker
+##going
+gora
+nostalgia
+sabres
+virginity
+wrenched
+##minated
+syndication
+wielding
+eyre
+##56
+##gnon
+##igny
+behaved
+taxpayer
+sweeps
+##growth
+childless
+gallant
+##ywood
+amplified
+geraldine
+scrape
+##ffi
+babylonian
+fresco
+##rdan
+##kney
+##position
+1718
+restricting
+tack
+fukuoka
+osborn
+selector
+partnering
+##dlow
+318
+gnu
+kia
+tak
+whitley
+gables
+##54
+##mania
+mri
+softness
+immersion
+##bots
+##evsky
+1713
+chilling
+insignificant
+pcs
+##uis
+elites
+lina
+purported
+supplemental
+teaming
+##americana
+##dding
+##inton
+proficient
+rouen
+##nage
+##rret
+niccolo
+selects
+##bread
+fluffy
+1621
+gruff
+knotted
+mukherjee
+polgara
+thrash
+nicholls
+secluded
+smoothing
+thru
+corsica
+loaf
+whitaker
+inquiries
+##rrier
+##kam
+indochina
+289
+marlins
+myles
+peking
+##tea
+extracts
+pastry
+superhuman
+connacht
+vogel
+##ditional
+##het
+##udged
+##lash
+gloss
+quarries
+refit
+teaser
+##alic
+##gaon
+20s
+materialized
+sling
+camped
+pickering
+tung
+tracker
+pursuant
+##cide
+cranes
+soc
+##cini
+##typical
+##viere
+anhalt
+overboard
+workout
+chores
+fares
+orphaned
+stains
+##logie
+fenton
+surpassing
+joyah
+triggers
+##itte
+grandmaster
+##lass
+##lists
+clapping
+fraudulent
+ledger
+nagasaki
+##cor
+##nosis
+##tsa
+eucalyptus
+tun
+##icio
+##rney
+##tara
+dax
+heroism
+ina
+wrexham
+onboard
+unsigned
+##dates
+moshe
+galley
+winnie
+droplets
+exiles
+praises
+watered
+noodles
+##aia
+fein
+adi
+leland
+multicultural
+stink
+bingo
+comets
+erskine
+modernized
+canned
+constraint
+domestically
+chemotherapy
+featherweight
+stifled
+##mum
+darkly
+irresistible
+refreshing
+hasty
+isolate
+##oys
+kitchener
+planners
+##wehr
+cages
+yarn
+implant
+toulon
+elects
+childbirth
+yue
+##lind
+##lone
+cn
+rightful
+sportsman
+junctions
+remodeled
+specifies
+##rgh
+291
+##oons
+complimented
+##urgent
+lister
+ot
+##logic
+bequeathed
+cheekbones
+fontana
+gabby
+##dial
+amadeus
+corrugated
+maverick
+resented
+triangles
+##hered
+##usly
+nazareth
+tyrol
+1675
+assent
+poorer
+sectional
+aegean
+##cous
+296
+nylon
+ghanaian
+##egorical
+##weig
+cushions
+forbid
+fusiliers
+obstruction
+somerville
+##scia
+dime
+earrings
+elliptical
+leyte
+oder
+polymers
+timmy
+atm
+midtown
+piloted
+settles
+continual
+externally
+mayfield
+##uh
+enrichment
+henson
+keane
+persians
+1733
+benji
+braden
+pep
+324
+##efe
+contenders
+pepsi
+valet
+##isches
+298
+##asse
+##earing
+goofy
+stroll
+##amen
+authoritarian
+occurrences
+adversary
+ahmedabad
+tangent
+toppled
+dorchester
+1672
+modernism
+marxism
+islamist
+charlemagne
+exponential
+racks
+unicode
+brunette
+mbc
+pic
+skirmish
+##bund
+##lad
+##powered
+##yst
+hoisted
+messina
+shatter
+##ctum
+jedi
+vantage
+##music
+##neil
+clemens
+mahmoud
+corrupted
+authentication
+lowry
+nils
+##washed
+omnibus
+wounding
+jillian
+##itors
+##opped
+serialized
+narcotics
+handheld
+##arm
+##plicity
+intersecting
+stimulating
+##onis
+crate
+fellowships
+hemingway
+casinos
+climatic
+fordham
+copeland
+drip
+beatty
+leaflets
+robber
+brothel
+madeira
+##hedral
+sphinx
+ultrasound
+##vana
+valor
+forbade
+leonid
+villas
+##aldo
+duane
+marquez
+##cytes
+disadvantaged
+forearms
+kawasaki
+reacts
+consular
+lax
+uncles
+uphold
+##hopper
+concepcion
+dorsey
+lass
+##izan
+arching
+passageway
+1708
+researches
+tia
+internationals
+##graphs
+##opers
+distinguishes
+javanese
+divert
+##uven
+plotted
+##listic
+##rwin
+##erik
+##tify
+affirmative
+signifies
+validation
+##bson
+kari
+felicity
+georgina
+zulu
+##eros
+##rained
+##rath
+overcoming
+##dot
+argyll
+##rbin
+1734
+chiba
+ratification
+windy
+earls
+parapet
+##marks
+hunan
+pristine
+astrid
+punta
+##gart
+brodie
+##kota
+##oder
+malaga
+minerva
+rouse
+##phonic
+bellowed
+pagoda
+portals
+reclamation
+##gur
+##odies
+##⁄₄
+parentheses
+quoting
+allergic
+palette
+showcases
+benefactor
+heartland
+nonlinear
+##tness
+bladed
+cheerfully
+scans
+##ety
+##hone
+1666
+girlfriends
+pedersen
+hiram
+sous
+##liche
+##nator
+1683
+##nery
+##orio
+##umen
+bobo
+primaries
+smiley
+##cb
+unearthed
+uniformly
+fis
+metadata
+1635
+ind
+##oted
+recoil
+##titles
+##tura
+##ια
+406
+hilbert
+jamestown
+mcmillan
+tulane
+seychelles
+##frid
+antics
+coli
+fated
+stucco
+##grants
+1654
+bulky
+accolades
+arrays
+caledonian
+carnage
+optimism
+puebla
+##tative
+##cave
+enforcing
+rotherham
+seo
+dunlop
+aeronautics
+chimed
+incline
+zoning
+archduke
+hellenistic
+##oses
+##sions
+candi
+thong
+##ople
+magnate
+rustic
+##rsk
+projective
+slant
+##offs
+danes
+hollis
+vocalists
+##ammed
+congenital
+contend
+gesellschaft
+##ocating
+##pressive
+douglass
+quieter
+##cm
+##kshi
+howled
+salim
+spontaneously
+townsville
+buena
+southport
+##bold
+kato
+1638
+faerie
+stiffly
+##vus
+##rled
+297
+flawless
+realising
+taboo
+##7th
+bytes
+straightening
+356
+jena
+##hid
+##rmin
+cartwright
+berber
+bertram
+soloists
+411
+noses
+417
+coping
+fission
+hardin
+inca
+##cen
+1717
+mobilized
+vhf
+##raf
+biscuits
+curate
+##85
+##anial
+331
+gaunt
+neighbourhoods
+1540
+##abas
+blanca
+bypassed
+sockets
+behold
+coincidentally
+##bane
+nara
+shave
+splinter
+terrific
+##arion
+##erian
+commonplace
+juris
+redwood
+waistband
+boxed
+caitlin
+fingerprints
+jennie
+naturalized
+##ired
+balfour
+craters
+jody
+bungalow
+hugely
+quilt
+glitter
+pigeons
+undertaker
+bulging
+constrained
+goo
+##sil
+##akh
+assimilation
+reworked
+##person
+persuasion
+##pants
+felicia
+##cliff
+##ulent
+1732
+explodes
+##dun
+##inium
+##zic
+lyman
+vulture
+hog
+overlook
+begs
+northwards
+ow
+spoil
+##urer
+fatima
+favorably
+accumulate
+sargent
+sorority
+corresponded
+dispersal
+kochi
+toned
+##imi
+##lita
+internacional
+newfound
+##agger
+##lynn
+##rigue
+booths
+peanuts
+##eborg
+medicare
+muriel
+nur
+##uram
+crates
+millennia
+pajamas
+worsened
+##breakers
+jimi
+vanuatu
+yawned
+##udeau
+carousel
+##hony
+hurdle
+##ccus
+##mounted
+##pod
+rv
+##eche
+airship
+ambiguity
+compulsion
+recapture
+##claiming
+arthritis
+##osomal
+1667
+asserting
+ngc
+sniffing
+dade
+discontent
+glendale
+ported
+##amina
+defamation
+rammed
+##scent
+fling
+livingstone
+##fleet
+875
+##ppy
+apocalyptic
+comrade
+lcd
+##lowe
+cessna
+eine
+persecuted
+subsistence
+demi
+hoop
+reliefs
+710
+coptic
+progressing
+stemmed
+perpetrators
+1665
+priestess
+##nio
+dobson
+ebony
+rooster
+itf
+tortricidae
+##bbon
+##jian
+cleanup
+##jean
+##øy
+1721
+eighties
+taxonomic
+holiness
+##hearted
+##spar
+antilles
+showcasing
+stabilized
+##nb
+gia
+mascara
+michelangelo
+dawned
+##uria
+##vinsky
+extinguished
+fitz
+grotesque
+£100
+##fera
+##loid
+##mous
+barges
+neue
+throbbed
+cipher
+johnnie
+##a1
+##mpt
+outburst
+##swick
+spearheaded
+administrations
+c1
+heartbreak
+pixels
+pleasantly
+##enay
+lombardy
+plush
+##nsed
+bobbie
+##hly
+reapers
+tremor
+xiang
+minogue
+substantive
+hitch
+barak
+##wyl
+kwan
+##encia
+910
+obscene
+elegance
+indus
+surfer
+bribery
+conserve
+##hyllum
+##masters
+horatio
+##fat
+apes
+rebound
+psychotic
+##pour
+iteration
+##mium
+##vani
+botanic
+horribly
+antiques
+dispose
+paxton
+##hli
+##wg
+timeless
+1704
+disregard
+engraver
+hounds
+##bau
+##version
+looted
+uno
+facilitates
+groans
+masjid
+rutland
+antibody
+disqualification
+decatur
+footballers
+quake
+slacks
+48th
+rein
+scribe
+stabilize
+commits
+exemplary
+tho
+##hort
+##chison
+pantry
+traversed
+##hiti
+disrepair
+identifiable
+vibrated
+baccalaureate
+##nnis
+csa
+interviewing
+##iensis
+##raße
+greaves
+wealthiest
+343
+classed
+jogged
+£5
+##58
+##atal
+illuminating
+knicks
+respecting
+##uno
+scrubbed
+##iji
+##dles
+kruger
+moods
+growls
+raider
+silvia
+chefs
+kam
+vr
+cree
+percival
+##terol
+gunter
+counterattack
+defiant
+henan
+ze
+##rasia
+##riety
+equivalence
+submissions
+##fra
+##thor
+bautista
+mechanically
+##heater
+cornice
+herbal
+templar
+##mering
+outputs
+ruining
+ligand
+renumbered
+extravagant
+mika
+blockbuster
+eta
+insurrection
+##ilia
+darkening
+ferocious
+pianos
+strife
+kinship
+##aer
+melee
+##anor
+##iste
+##may
+##oue
+decidedly
+weep
+##jad
+##missive
+##ppel
+354
+puget
+unease
+##gnant
+1629
+hammering
+kassel
+ob
+wessex
+##lga
+bromwich
+egan
+paranoia
+utilization
+##atable
+##idad
+contradictory
+provoke
+##ols
+##ouring
+##tangled
+knesset
+##very
+##lette
+plumbing
+##sden
+##¹
+greensboro
+occult
+sniff
+338
+zev
+beaming
+gamer
+haggard
+mahal
+##olt
+##pins
+mendes
+utmost
+briefing
+gunnery
+##gut
+##pher
+##zh
+##rok
+1679
+khalifa
+sonya
+##boot
+principals
+urbana
+wiring
+##liffe
+##minating
+##rrado
+dahl
+nyu
+skepticism
+np
+townspeople
+ithaca
+lobster
+somethin
+##fur
+##arina
+##−1
+freighter
+zimmerman
+biceps
+contractual
+##herton
+amend
+hurrying
+subconscious
+##anal
+336
+meng
+clermont
+spawning
+##eia
+##lub
+dignitaries
+impetus
+snacks
+spotting
+twigs
+##bilis
+##cz
+##ouk
+libertadores
+nic
+skylar
+##aina
+##firm
+gustave
+asean
+##anum
+dieter
+legislatures
+flirt
+bromley
+trolls
+umar
+##bbies
+##tyle
+blah
+parc
+bridgeport
+crank
+negligence
+##nction
+46th
+constantin
+molded
+bandages
+seriousness
+00pm
+siegel
+carpets
+compartments
+upbeat
+statehood
+##dner
+##edging
+marko
+730
+platt
+##hane
+paving
+##iy
+1738
+abbess
+impatience
+limousine
+nbl
+##talk
+441
+lucille
+mojo
+nightfall
+robbers
+##nais
+karel
+brisk
+calves
+replicate
+ascribed
+telescopes
+##olf
+intimidated
+##reen
+ballast
+specialization
+##sit
+aerodynamic
+caliphate
+rainer
+visionary
+##arded
+epsilon
+##aday
+##onte
+aggregation
+auditory
+boosted
+reunification
+kathmandu
+loco
+robyn
+402
+acknowledges
+appointing
+humanoid
+newell
+redeveloped
+restraints
+##tained
+barbarians
+chopper
+1609
+italiana
+##lez
+##lho
+investigates
+wrestlemania
+##anies
+##bib
+690
+##falls
+creaked
+dragoons
+gravely
+minions
+stupidity
+volley
+##harat
+##week
+musik
+##eries
+##uously
+fungal
+massimo
+semantics
+malvern
+##ahl
+##pee
+discourage
+embryo
+imperialism
+1910s
+profoundly
+##ddled
+jiangsu
+sparkled
+stat
+##holz
+sweatshirt
+tobin
+##iction
+sneered
+##cheon
+##oit
+brit
+causal
+smyth
+##neuve
+diffuse
+perrin
+silvio
+##ipes
+##recht
+detonated
+iqbal
+selma
+##nism
+##zumi
+roasted
+##riders
+tay
+##ados
+##mament
+##mut
+##rud
+840
+completes
+nipples
+cfa
+flavour
+hirsch
+##laus
+calderon
+sneakers
+moravian
+##ksha
+1622
+rq
+294
+##imeters
+bodo
+##isance
+##pre
+##ronia
+anatomical
+excerpt
+##lke
+dh
+kunst
+##tablished
+##scoe
+biomass
+panted
+unharmed
+gael
+housemates
+montpellier
+##59
+coa
+rodents
+tonic
+hickory
+singleton
+##taro
+451
+1719
+aldo
+breaststroke
+dempsey
+och
+rocco
+##cuit
+merton
+dissemination
+midsummer
+serials
+##idi
+haji
+polynomials
+##rdon
+gs
+enoch
+prematurely
+shutter
+taunton
+£3
+##grating
+##inates
+archangel
+harassed
+##asco
+326
+archway
+dazzling
+##ecin
+1736
+sumo
+wat
+##kovich
+1086
+honneur
+##ently
+##nostic
+##ttal
+##idon
+1605
+403
+1716
+blogger
+rents
+##gnan
+hires
+##ikh
+##dant
+howie
+##rons
+handler
+retracted
+shocks
+1632
+arun
+duluth
+kepler
+trumpeter
+##lary
+peeking
+seasoned
+trooper
+##mara
+laszlo
+##iciencies
+##rti
+heterosexual
+##inatory
+##ssion
+indira
+jogging
+##inga
+##lism
+beit
+dissatisfaction
+malice
+##ately
+nedra
+peeling
+##rgeon
+47th
+stadiums
+475
+vertigo
+##ains
+iced
+restroom
+##plify
+##tub
+illustrating
+pear
+##chner
+##sibility
+inorganic
+rappers
+receipts
+watery
+##kura
+lucinda
+##oulos
+reintroduced
+##8th
+##tched
+gracefully
+saxons
+nutritional
+wastewater
+rained
+favourites
+bedrock
+fisted
+hallways
+likeness
+upscale
+##lateral
+1580
+blinds
+prequel
+##pps
+##tama
+deter
+humiliating
+restraining
+tn
+vents
+1659
+laundering
+recess
+rosary
+tractors
+coulter
+federer
+##ifiers
+##plin
+persistence
+##quitable
+geschichte
+pendulum
+quakers
+##beam
+bassett
+pictorial
+buffet
+koln
+##sitor
+drills
+reciprocal
+shooters
+##57
+##cton
+##tees
+converge
+pip
+dmitri
+donnelly
+yamamoto
+aqua
+azores
+demographics
+hypnotic
+spitfire
+suspend
+wryly
+roderick
+##rran
+sebastien
+##asurable
+mavericks
+##fles
+##200
+himalayan
+prodigy
+##iance
+transvaal
+demonstrators
+handcuffs
+dodged
+mcnamara
+sublime
+1726
+crazed
+##efined
+##till
+ivo
+pondered
+reconciled
+shrill
+sava
+##duk
+bal
+cad
+heresy
+jaipur
+goran
+##nished
+341
+lux
+shelly
+whitehall
+##hre
+israelis
+peacekeeping
+##wled
+1703
+demetrius
+ousted
+##arians
+##zos
+beale
+anwar
+backstroke
+raged
+shrinking
+cremated
+##yck
+benign
+towing
+wadi
+darmstadt
+landfill
+parana
+soothe
+colleen
+sidewalks
+mayfair
+tumble
+hepatitis
+ferrer
+superstructure
+##gingly
+##urse
+##wee
+anthropological
+translators
+##mies
+closeness
+hooves
+##pw
+mondays
+##roll
+##vita
+landscaping
+##urized
+purification
+sock
+thorns
+thwarted
+jalan
+tiberius
+##taka
+saline
+##rito
+confidently
+khyber
+sculptors
+##ij
+brahms
+hammersmith
+inspectors
+battista
+fivb
+fragmentation
+hackney
+##uls
+arresting
+exercising
+antoinette
+bedfordshire
+##zily
+dyed
+##hema
+1656
+racetrack
+variability
+##tique
+1655
+austrians
+deteriorating
+madman
+theorists
+aix
+lehman
+weathered
+1731
+decreed
+eruptions
+1729
+flaw
+quinlan
+sorbonne
+flutes
+nunez
+1711
+adored
+downwards
+fable
+rasped
+1712
+moritz
+mouthful
+renegade
+shivers
+stunts
+dysfunction
+restrain
+translit
+327
+pancakes
+##avio
+##cision
+##tray
+351
+vial
+##lden
+bain
+##maid
+##oxide
+chihuahua
+malacca
+vimes
+##rba
+##rnier
+1664
+donnie
+plaques
+##ually
+337
+bangs
+floppy
+huntsville
+loretta
+nikolay
+##otte
+eater
+handgun
+ubiquitous
+##hett
+eras
+zodiac
+1634
+##omorphic
+1820s
+##zog
+cochran
+##bula
+##lithic
+warring
+##rada
+dalai
+excused
+blazers
+mcconnell
+reeling
+bot
+este
+##abi
+geese
+hoax
+taxon
+##bla
+guitarists
+##icon
+condemning
+hunts
+inversion
+moffat
+taekwondo
+##lvis
+1624
+stammered
+##rest
+##rzy
+sousa
+fundraiser
+marylebone
+navigable
+uptown
+cabbage
+daniela
+salman
+shitty
+whimper
+##kian
+##utive
+programmers
+protections
+rm
+##rmi
+##rued
+forceful
+##enes
+fuss
+##tao
+##wash
+brat
+oppressive
+reykjavik
+spartak
+ticking
+##inkles
+##kiewicz
+adolph
+horst
+maui
+protege
+straighten
+cpc
+landau
+concourse
+clements
+resultant
+##ando
+imaginative
+joo
+reactivated
+##rem
+##ffled
+##uising
+consultative
+##guide
+flop
+kaitlyn
+mergers
+parenting
+somber
+##vron
+supervise
+vidhan
+##imum
+courtship
+exemplified
+harmonies
+medallist
+refining
+##rrow
+##ка
+amara
+##hum
+780
+goalscorer
+sited
+overshadowed
+rohan
+displeasure
+secretive
+multiplied
+osman
+##orth
+engravings
+padre
+##kali
+##veda
+miniatures
+mis
+##yala
+clap
+pali
+rook
+##cana
+1692
+57th
+antennae
+astro
+oskar
+1628
+bulldog
+crotch
+hackett
+yucatan
+##sure
+amplifiers
+brno
+ferrara
+migrating
+##gree
+thanking
+turing
+##eza
+mccann
+ting
+andersson
+onslaught
+gaines
+ganga
+incense
+standardization
+##mation
+sentai
+scuba
+stuffing
+turquoise
+waivers
+alloys
+##vitt
+regaining
+vaults
+##clops
+##gizing
+digger
+furry
+memorabilia
+probing
+##iad
+payton
+rec
+deutschland
+filippo
+opaque
+seamen
+zenith
+afrikaans
+##filtration
+disciplined
+inspirational
+##merie
+banco
+confuse
+grafton
+tod
+##dgets
+championed
+simi
+anomaly
+biplane
+##ceptive
+electrode
+##para
+1697
+cleavage
+crossbow
+swirl
+informant
+##lars
+##osta
+afi
+bonfire
+spec
+##oux
+lakeside
+slump
+##culus
+##lais
+##qvist
+##rrigan
+1016
+facades
+borg
+inwardly
+cervical
+xl
+pointedly
+050
+stabilization
+##odon
+chests
+1699
+hacked
+ctv
+orthogonal
+suzy
+##lastic
+gaulle
+jacobite
+rearview
+##cam
+##erted
+ashby
+##drik
+##igate
+##mise
+##zbek
+affectionately
+canine
+disperse
+latham
+##istles
+##ivar
+spielberg
+##orin
+##idium
+ezekiel
+cid
+##sg
+durga
+middletown
+##cina
+customized
+frontiers
+harden
+##etano
+##zzy
+1604
+bolsheviks
+##66
+coloration
+yoko
+##bedo
+briefs
+slabs
+debra
+liquidation
+plumage
+##oin
+blossoms
+dementia
+subsidy
+1611
+proctor
+relational
+jerseys
+parochial
+ter
+##ici
+esa
+peshawar
+cavalier
+loren
+cpi
+idiots
+shamrock
+1646
+dutton
+malabar
+mustache
+##endez
+##ocytes
+referencing
+terminates
+marche
+yarmouth
+##sop
+acton
+mated
+seton
+subtly
+baptised
+beige
+extremes
+jolted
+kristina
+telecast
+##actic
+safeguard
+waldo
+##baldi
+##bular
+endeavors
+sloppy
+subterranean
+##ensburg
+##itung
+delicately
+pigment
+tq
+##scu
+1626
+##ound
+collisions
+coveted
+herds
+##personal
+##meister
+##nberger
+chopra
+##ricting
+abnormalities
+defective
+galician
+lucie
+##dilly
+alligator
+likened
+##genase
+burundi
+clears
+complexion
+derelict
+deafening
+diablo
+fingered
+champaign
+dogg
+enlist
+isotope
+labeling
+mrna
+##erre
+brilliance
+marvelous
+##ayo
+1652
+crawley
+ether
+footed
+dwellers
+deserts
+hamish
+rubs
+warlock
+skimmed
+##lizer
+870
+buick
+embark
+heraldic
+irregularities
+##ajan
+kiara
+##kulam
+##ieg
+antigen
+kowalski
+##lge
+oakley
+visitation
+##mbit
+vt
+##suit
+1570
+murderers
+##miento
+##rites
+chimneys
+##sling
+condemn
+custer
+exchequer
+havre
+##ghi
+fluctuations
+##rations
+dfb
+hendricks
+vaccines
+##tarian
+nietzsche
+biking
+juicy
+##duced
+brooding
+scrolling
+selangor
+##ragan
+352
+annum
+boomed
+seminole
+sugarcane
+##dna
+departmental
+dismissing
+innsbruck
+arteries
+ashok
+batavia
+daze
+kun
+overtook
+##rga
+##tlan
+beheaded
+gaddafi
+holm
+electronically
+faulty
+galilee
+fractures
+kobayashi
+##lized
+gunmen
+magma
+aramaic
+mala
+eastenders
+inference
+messengers
+bf
+##qu
+407
+bathrooms
+##vere
+1658
+flashbacks
+ideally
+misunderstood
+##jali
+##weather
+mendez
+##grounds
+505
+uncanny
+##iii
+1709
+friendships
+##nbc
+sacrament
+accommodated
+reiterated
+logistical
+pebbles
+thumped
+##escence
+administering
+decrees
+drafts
+##flight
+##cased
+##tula
+futuristic
+picket
+intimidation
+winthrop
+##fahan
+interfered
+339
+afar
+francoise
+morally
+uta
+cochin
+croft
+dwarfs
+##bruck
+##dents
+##nami
+biker
+##hner
+##meral
+nano
+##isen
+##ometric
+##pres
+##ан
+brightened
+meek
+parcels
+securely
+gunners
+##jhl
+##zko
+agile
+hysteria
+##lten
+##rcus
+bukit
+champs
+chevy
+cuckoo
+leith
+sadler
+theologians
+welded
+##section
+1663
+jj
+plurality
+xander
+##rooms
+##formed
+shredded
+temps
+intimately
+pau
+tormented
+##lok
+##stellar
+1618
+charred
+ems
+essen
+##mmel
+alarms
+spraying
+ascot
+blooms
+twinkle
+##abia
+##apes
+internment
+obsidian
+##chaft
+snoop
+##dav
+##ooping
+malibu
+##tension
+quiver
+##itia
+hays
+mcintosh
+travers
+walsall
+##ffie
+1623
+beverley
+schwarz
+plunging
+structurally
+m3
+rosenthal
+vikram
+##tsk
+770
+ghz
+##onda
+##tiv
+chalmers
+groningen
+pew
+reckon
+unicef
+##rvis
+55th
+##gni
+1651
+sulawesi
+avila
+cai
+metaphysical
+screwing
+turbulence
+##mberg
+augusto
+samba
+56th
+baffled
+momentary
+toxin
+##urian
+##wani
+aachen
+condoms
+dali
+steppe
+##3d
+##app
+##oed
+##year
+adolescence
+dauphin
+electrically
+inaccessible
+microscopy
+nikita
+##ega
+atv
+##cel
+##enter
+##oles
+##oteric
+##ы
+accountants
+punishments
+wrongly
+bribes
+adventurous
+clinch
+flinders
+southland
+##hem
+##kata
+gough
+##ciency
+lads
+soared
+##ה
+undergoes
+deformation
+outlawed
+rubbish
+##arus
+##mussen
+##nidae
+##rzburg
+arcs
+##ingdon
+##tituted
+1695
+wheelbase
+wheeling
+bombardier
+campground
+zebra
+##lices
+##oj
+##bain
+lullaby
+##ecure
+donetsk
+wylie
+grenada
+##arding
+##ης
+squinting
+eireann
+opposes
+##andra
+maximal
+runes
+##broken
+##cuting
+##iface
+##ror
+##rosis
+additive
+britney
+adultery
+triggering
+##drome
+detrimental
+aarhus
+containment
+jc
+swapped
+vichy
+##ioms
+madly
+##oric
+##rag
+brant
+##ckey
+##trix
+1560
+1612
+broughton
+rustling
+##stems
+##uder
+asbestos
+mentoring
+##nivorous
+finley
+leaps
+##isan
+apical
+pry
+slits
+substitutes
+##dict
+intuitive
+fantasia
+insistent
+unreasonable
+##igen
+##vna
+domed
+hannover
+margot
+ponder
+##zziness
+impromptu
+jian
+lc
+rampage
+stemming
+##eft
+andrey
+gerais
+whichever
+amnesia
+appropriated
+anzac
+clicks
+modifying
+ultimatum
+cambrian
+maids
+verve
+yellowstone
+##mbs
+conservatoire
+##scribe
+adherence
+dinners
+spectra
+imperfect
+mysteriously
+sidekick
+tatar
+tuba
+##aks
+##ifolia
+distrust
+##athan
+##zle
+c2
+ronin
+zac
+##pse
+celaena
+instrumentalist
+scents
+skopje
+##mbling
+comical
+compensated
+vidal
+condor
+intersect
+jingle
+wavelengths
+##urrent
+mcqueen
+##izzly
+carp
+weasel
+422
+kanye
+militias
+postdoctoral
+eugen
+gunslinger
+##ɛ
+faux
+hospice
+##for
+appalled
+derivation
+dwarves
+##elis
+dilapidated
+##folk
+astoria
+philology
+##lwyn
+##otho
+##saka
+inducing
+philanthropy
+##bf
+##itative
+geek
+markedly
+sql
+##yce
+bessie
+indices
+rn
+##flict
+495
+frowns
+resolving
+weightlifting
+tugs
+cleric
+contentious
+1653
+mania
+rms
+##miya
+##reate
+##ruck
+##tucket
+bien
+eels
+marek
+##ayton
+##cence
+discreet
+unofficially
+##ife
+leaks
+##bber
+1705
+332
+dung
+compressor
+hillsborough
+pandit
+shillings
+distal
+##skin
+381
+##tat
+##you
+nosed
+##nir
+mangrove
+undeveloped
+##idia
+textures
+##inho
+##500
+##rise
+ae
+irritating
+nay
+amazingly
+bancroft
+apologetic
+compassionate
+kata
+symphonies
+##lovic
+airspace
+##lch
+930
+gifford
+precautions
+fulfillment
+sevilla
+vulgar
+martinique
+##urities
+looting
+piccolo
+tidy
+##dermott
+quadrant
+armchair
+incomes
+mathematicians
+stampede
+nilsson
+##inking
+##scan
+foo
+quarterfinal
+##ostal
+shang
+shouldered
+squirrels
+##owe
+344
+vinegar
+##bner
+##rchy
+##systems
+delaying
+##trics
+ars
+dwyer
+rhapsody
+sponsoring
+##gration
+bipolar
+cinder
+starters
+##olio
+##urst
+421
+signage
+##nty
+aground
+figurative
+mons
+acquaintances
+duets
+erroneously
+soyuz
+elliptic
+recreated
+##cultural
+##quette
+##ssed
+##tma
+##zcz
+moderator
+scares
+##itaire
+##stones
+##udence
+juniper
+sighting
+##just
+##nsen
+britten
+calabria
+ry
+bop
+cramer
+forsyth
+stillness
+##л
+airmen
+gathers
+unfit
+##umber
+##upt
+taunting
+##rip
+seeker
+streamlined
+##bution
+holster
+schumann
+tread
+vox
+##gano
+##onzo
+strive
+dil
+reforming
+covent
+newbury
+predicting
+##orro
+decorate
+tre
+##puted
+andover
+ie
+asahi
+dept
+dunkirk
+gills
+##tori
+buren
+huskies
+##stis
+##stov
+abstracts
+bets
+loosen
+##opa
+1682
+yearning
+##glio
+##sir
+berman
+effortlessly
+enamel
+napoli
+persist
+##peration
+##uez
+attache
+elisa
+b1
+invitations
+##kic
+accelerating
+reindeer
+boardwalk
+clutches
+nelly
+polka
+starbucks
+##kei
+adamant
+huey
+lough
+unbroken
+adventurer
+embroidery
+inspecting
+stanza
+##ducted
+naia
+taluka
+##pone
+##roids
+chases
+deprivation
+florian
+##jing
+##ppet
+earthly
+##lib
+##ssee
+colossal
+foreigner
+vet
+freaks
+patrice
+rosewood
+triassic
+upstate
+##pkins
+dominates
+ata
+chants
+ks
+vo
+##400
+##bley
+##raya
+##rmed
+555
+agra
+infiltrate
+##ailing
+##ilation
+##tzer
+##uppe
+##werk
+binoculars
+enthusiast
+fujian
+squeak
+##avs
+abolitionist
+almeida
+boredom
+hampstead
+marsden
+rations
+##ands
+inflated
+334
+bonuses
+rosalie
+patna
+##rco
+329
+detachments
+penitentiary
+54th
+flourishing
+woolf
+##dion
+##etched
+papyrus
+##lster
+##nsor
+##toy
+bobbed
+dismounted
+endelle
+inhuman
+motorola
+tbs
+wince
+wreath
+##ticus
+hideout
+inspections
+sanjay
+disgrace
+infused
+pudding
+stalks
+##urbed
+arsenic
+leases
+##hyl
+##rrard
+collarbone
+##waite
+##wil
+dowry
+##bant
+##edance
+genealogical
+nitrate
+salamanca
+scandals
+thyroid
+necessitated
+##!
+##"
+###
+##$
+##%
+##&
+##'
+##(
+##)
+##*
+##+
+##,
+##-
+##.
+##/
+##:
+##;
+##<
+##=
+##>
+##?
+##@
+##[
+##\
+##]
+##^
+##_
+##`
+##{
+##|
+##}
+##~
+##¡
+##¢
+##£
+##¤
+##¥
+##¦
+##§
+##¨
+##©
+##ª
+##«
+##¬
+##®
+##±
+##´
+##µ
+##¶
+##·
+##º
+##»
+##¼
+##¾
+##¿
+##æ
+##ð
+##÷
+##þ
+##đ
+##ħ
+##ŋ
+##œ
+##ƒ
+##ɐ
+##ɑ
+##ɒ
+##ɔ
+##ɕ
+##ə
+##ɡ
+##ɣ
+##ɨ
+##ɪ
+##ɫ
+##ɬ
+##ɯ
+##ɲ
+##ɴ
+##ɹ
+##ɾ
+##ʀ
+##ʁ
+##ʂ
+##ʃ
+##ʉ
+##ʊ
+##ʋ
+##ʌ
+##ʎ
+##ʐ
+##ʑ
+##ʒ
+##ʔ
+##ʰ
+##ʲ
+##ʳ
+##ʷ
+##ʸ
+##ʻ
+##ʼ
+##ʾ
+##ʿ
+##ˈ
+##ˡ
+##ˢ
+##ˣ
+##ˤ
+##β
+##γ
+##δ
+##ε
+##ζ
+##θ
+##κ
+##λ
+##μ
+##ξ
+##ο
+##π
+##ρ
+##σ
+##τ
+##υ
+##φ
+##χ
+##ψ
+##ω
+##б
+##г
+##д
+##ж
+##з
+##м
+##п
+##с
+##у
+##ф
+##х
+##ц
+##ч
+##ш
+##щ
+##ъ
+##э
+##ю
+##ђ
+##є
+##і
+##ј
+##љ
+##њ
+##ћ
+##ӏ
+##ա
+##բ
+##գ
+##դ
+##ե
+##թ
+##ի
+##լ
+##կ
+##հ
+##մ
+##յ
+##ն
+##ո
+##պ
+##ս
+##վ
+##տ
+##ր
+##ւ
+##ք
+##־
+##א
+##ב
+##ג
+##ד
+##ו
+##ז
+##ח
+##ט
+##י
+##ך
+##כ
+##ל
+##ם
+##מ
+##ן
+##נ
+##ס
+##ע
+##ף
+##פ
+##ץ
+##צ
+##ק
+##ר
+##ש
+##ת
+##،
+##ء
+##ب
+##ت
+##ث
+##ج
+##ح
+##خ
+##ذ
+##ز
+##س
+##ش
+##ص
+##ض
+##ط
+##ظ
+##ع
+##غ
+##ـ
+##ف
+##ق
+##ك
+##و
+##ى
+##ٹ
+##پ
+##چ
+##ک
+##گ
+##ں
+##ھ
+##ہ
+##ے
+##अ
+##आ
+##उ
+##ए
+##क
+##ख
+##ग
+##च
+##ज
+##ट
+##ड
+##ण
+##त
+##थ
+##द
+##ध
+##न
+##प
+##ब
+##भ
+##म
+##य
+##र
+##ल
+##व
+##श
+##ष
+##स
+##ह
+##ा
+##ि
+##ी
+##ो
+##।
+##॥
+##ং
+##অ
+##আ
+##ই
+##উ
+##এ
+##ও
+##ক
+##খ
+##গ
+##চ
+##ছ
+##জ
+##ট
+##ড
+##ণ
+##ত
+##থ
+##দ
+##ধ
+##ন
+##প
+##ব
+##ভ
+##ম
+##য
+##র
+##ল
+##শ
+##ষ
+##স
+##হ
+##া
+##ি
+##ী
+##ে
+##க
+##ச
+##ட
+##த
+##ந
+##ன
+##ப
+##ம
+##ய
+##ர
+##ல
+##ள
+##வ
+##ா
+##ி
+##ு
+##ே
+##ை
+##ನ
+##ರ
+##ಾ
+##ක
+##ය
+##ර
+##ල
+##ව
+##ා
+##ก
+##ง
+##ต
+##ท
+##น
+##พ
+##ม
+##ย
+##ร
+##ล
+##ว
+##ส
+##อ
+##า
+##เ
+##་
+##།
+##ག
+##ང
+##ད
+##ན
+##པ
+##བ
+##མ
+##འ
+##ར
+##ལ
+##ས
+##မ
+##ა
+##ბ
+##გ
+##დ
+##ე
+##ვ
+##თ
+##ი
+##კ
+##ლ
+##მ
+##ნ
+##ო
+##რ
+##ს
+##ტ
+##უ
+##ᄀ
+##ᄂ
+##ᄃ
+##ᄅ
+##ᄆ
+##ᄇ
+##ᄉ
+##ᄊ
+##ᄋ
+##ᄌ
+##ᄎ
+##ᄏ
+##ᄐ
+##ᄑ
+##ᄒ
+##ᅡ
+##ᅢ
+##ᅥ
+##ᅦ
+##ᅧ
+##ᅩ
+##ᅪ
+##ᅭ
+##ᅮ
+##ᅯ
+##ᅲ
+##ᅳ
+##ᅴ
+##ᅵ
+##ᆨ
+##ᆫ
+##ᆯ
+##ᆷ
+##ᆸ
+##ᆼ
+##ᴬ
+##ᴮ
+##ᴰ
+##ᴵ
+##ᴺ
+##ᵀ
+##ᵃ
+##ᵇ
+##ᵈ
+##ᵉ
+##ᵍ
+##ᵏ
+##ᵐ
+##ᵒ
+##ᵖ
+##ᵗ
+##ᵘ
+##ᵣ
+##ᵤ
+##ᵥ
+##ᶜ
+##ᶠ
+##‐
+##‑
+##‒
+##–
+##—
+##―
+##‖
+##‘
+##’
+##‚
+##“
+##”
+##„
+##†
+##‡
+##•
+##…
+##‰
+##′
+##″
+##›
+##‿
+##⁄
+##⁰
+##ⁱ
+##⁴
+##⁵
+##⁶
+##⁷
+##⁸
+##⁹
+##⁻
+##ⁿ
+##₅
+##₆
+##₇
+##₈
+##₉
+##₊
+##₍
+##₎
+##ₐ
+##ₑ
+##ₒ
+##ₓ
+##ₕ
+##ₖ
+##ₗ
+##ₘ
+##ₚ
+##ₛ
+##ₜ
+##₤
+##₩
+##€
+##₱
+##₹
+##ℓ
+##№
+##ℝ
+##™
+##⅓
+##⅔
+##←
+##↑
+##→
+##↓
+##↔
+##↦
+##⇄
+##⇌
+##⇒
+##∂
+##∅
+##∆
+##∇
+##∈
+##∗
+##∘
+##√
+##∞
+##∧
+##∨
+##∩
+##∪
+##≈
+##≡
+##≤
+##≥
+##⊂
+##⊆
+##⊕
+##⊗
+##⋅
+##─
+##│
+##■
+##▪
+##●
+##★
+##☆
+##☉
+##♠
+##♣
+##♥
+##♦
+##♯
+##⟨
+##⟩
+##ⱼ
+##⺩
+##⺼
+##⽥
+##、
+##。
+##〈
+##〉
+##《
+##》
+##「
+##」
+##『
+##』
+##〜
+##あ
+##い
+##う
+##え
+##お
+##か
+##き
+##く
+##け
+##こ
+##さ
+##し
+##す
+##せ
+##そ
+##た
+##ち
+##っ
+##つ
+##て
+##と
+##な
+##に
+##ぬ
+##ね
+##の
+##は
+##ひ
+##ふ
+##へ
+##ほ
+##ま
+##み
+##む
+##め
+##も
+##や
+##ゆ
+##よ
+##ら
+##り
+##る
+##れ
+##ろ
+##を
+##ん
+##ァ
+##ア
+##ィ
+##イ
+##ウ
+##ェ
+##エ
+##オ
+##カ
+##キ
+##ク
+##ケ
+##コ
+##サ
+##シ
+##ス
+##セ
+##タ
+##チ
+##ッ
+##ツ
+##テ
+##ト
+##ナ
+##ニ
+##ノ
+##ハ
+##ヒ
+##フ
+##ヘ
+##ホ
+##マ
+##ミ
+##ム
+##メ
+##モ
+##ャ
+##ュ
+##ョ
+##ラ
+##リ
+##ル
+##レ
+##ロ
+##ワ
+##ン
+##・
+##ー
+##一
+##三
+##上
+##下
+##不
+##世
+##中
+##主
+##久
+##之
+##也
+##事
+##二
+##五
+##井
+##京
+##人
+##亻
+##仁
+##介
+##代
+##仮
+##伊
+##会
+##佐
+##侍
+##保
+##信
+##健
+##元
+##光
+##八
+##公
+##内
+##出
+##分
+##前
+##劉
+##力
+##加
+##勝
+##北
+##区
+##十
+##千
+##南
+##博
+##原
+##口
+##古
+##史
+##司
+##合
+##吉
+##同
+##名
+##和
+##囗
+##四
+##国
+##國
+##土
+##地
+##坂
+##城
+##堂
+##場
+##士
+##夏
+##外
+##大
+##天
+##太
+##夫
+##奈
+##女
+##子
+##学
+##宀
+##宇
+##安
+##宗
+##定
+##宣
+##宮
+##家
+##宿
+##寺
+##將
+##小
+##尚
+##山
+##岡
+##島
+##崎
+##川
+##州
+##巿
+##帝
+##平
+##年
+##幸
+##广
+##弘
+##張
+##彳
+##後
+##御
+##德
+##心
+##忄
+##志
+##忠
+##愛
+##成
+##我
+##戦
+##戸
+##手
+##扌
+##政
+##文
+##新
+##方
+##日
+##明
+##星
+##春
+##昭
+##智
+##曲
+##書
+##月
+##有
+##朝
+##木
+##本
+##李
+##村
+##東
+##松
+##林
+##森
+##楊
+##樹
+##橋
+##歌
+##止
+##正
+##武
+##比
+##氏
+##民
+##水
+##氵
+##氷
+##永
+##江
+##沢
+##河
+##治
+##法
+##海
+##清
+##漢
+##瀬
+##火
+##版
+##犬
+##王
+##生
+##田
+##男
+##疒
+##発
+##白
+##的
+##皇
+##目
+##相
+##省
+##真
+##石
+##示
+##社
+##神
+##福
+##禾
+##秀
+##秋
+##空
+##立
+##章
+##竹
+##糹
+##美
+##義
+##耳
+##良
+##艹
+##花
+##英
+##華
+##葉
+##藤
+##行
+##街
+##西
+##見
+##訁
+##語
+##谷
+##貝
+##貴
+##車
+##軍
+##辶
+##道
+##郎
+##郡
+##部
+##都
+##里
+##野
+##金
+##鈴
+##镇
+##長
+##門
+##間
+##阝
+##阿
+##陳
+##陽
+##雄
+##青
+##面
+##風
+##食
+##香
+##馬
+##高
+##龍
+##龸
+##fi
+##fl
+##!
+##(
+##)
+##,
+##-
+##.
+##/
+##:
+##?
+##~
diff --git a/native/annotator/pod_ner/utils.cc b/native/annotator/pod_ner/utils.cc
new file mode 100644
index 0000000..136a996
--- /dev/null
+++ b/native/annotator/pod_ner/utils.cc
@@ -0,0 +1,436 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "annotator/pod_ner/utils.h"
+
+#include <algorithm>
+#include <iostream>
+#include <unordered_map>
+
+#include "annotator/model_generated.h"
+#include "annotator/types.h"
+#include "utils/base/logging.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/str_split.h"
+
+namespace libtextclassifier3 {
+namespace {
+
+// Returns true if the needle string is contained in the haystack.
+bool StrIsOneOf(const std::string &needle,
+                const std::vector<std::string> &haystack) {
+  return std::find(haystack.begin(), haystack.end(), needle) != haystack.end();
+}
+
+// Finds the wordpiece span of the tokens in the given span.
+WordpieceSpan CodepointSpanToWordpieceSpan(
+    const CodepointSpan &span, const std::vector<Token> &tokens,
+    const std::vector<int32_t> &word_starts, int num_wordpieces) {
+  int span_first_wordpiece_index = 0;
+  int span_last_wordpiece_index = num_wordpieces;
+  for (int i = 0; i < tokens.size(); i++) {
+    if (tokens[i].start <= span.first && span.first < tokens[i].end) {
+      span_first_wordpiece_index = word_starts[i];
+    }
+    if (tokens[i].start <= span.second && span.second <= tokens[i].end) {
+      span_last_wordpiece_index =
+          (i + 1) < word_starts.size() ? word_starts[i + 1] : num_wordpieces;
+      break;
+    }
+  }
+  return WordpieceSpan(span_first_wordpiece_index, span_last_wordpiece_index);
+}
+
+}  // namespace
+
+std::string SaftLabelToCollection(absl::string_view saft_label) {
+  return std::string(saft_label.substr(saft_label.rfind('/') + 1));
+}
+
+namespace internal {
+
+int FindLastFullTokenIndex(const std::vector<int32_t> &word_starts,
+                           int num_wordpieces, int wordpiece_end) {
+  if (word_starts.empty()) {
+    return 0;
+  }
+  if (*word_starts.rbegin() < wordpiece_end &&
+      num_wordpieces <= wordpiece_end) {
+    // Last token.
+    return word_starts.size() - 1;
+  }
+  for (int i = word_starts.size() - 1; i > 0; --i) {
+    if (word_starts[i] <= wordpiece_end) {
+      return (i - 1);
+    }
+  }
+  return 0;
+}
+
+int FindFirstFullTokenIndex(const std::vector<int32_t> &word_starts,
+                            int first_wordpiece_index) {
+  for (int i = 0; i < word_starts.size(); ++i) {
+    if (word_starts[i] == first_wordpiece_index) {
+      return i;
+    } else if (word_starts[i] > first_wordpiece_index) {
+      return std::max(0, i - 1);
+    }
+  }
+
+  return std::max(0, static_cast<int>(word_starts.size()) - 1);
+}
+
+WordpieceSpan ExpandWindowAndAlign(int max_num_wordpieces_in_window,
+                                   int num_wordpieces,
+                                   WordpieceSpan wordpiece_span_to_expand) {
+  if (wordpiece_span_to_expand.length() >= max_num_wordpieces_in_window) {
+    return wordpiece_span_to_expand;
+  }
+  int window_first_wordpiece_index = std::max(
+      0, wordpiece_span_to_expand.begin - ((max_num_wordpieces_in_window -
+                                            wordpiece_span_to_expand.length()) /
+                                           2));
+  if ((window_first_wordpiece_index + max_num_wordpieces_in_window) >
+      num_wordpieces) {
+    window_first_wordpiece_index =
+        std::max(num_wordpieces - max_num_wordpieces_in_window, 0);
+  }
+  return WordpieceSpan(
+      window_first_wordpiece_index,
+      std::min(window_first_wordpiece_index + max_num_wordpieces_in_window,
+               num_wordpieces));
+}
+
+WordpieceSpan FindWordpiecesWindowAroundSpan(
+    const CodepointSpan &span_of_interest, const std::vector<Token> &tokens,
+    const std::vector<int32_t> &word_starts, int num_wordpieces,
+    int max_num_wordpieces_in_window) {
+  WordpieceSpan wordpiece_span_to_expand = CodepointSpanToWordpieceSpan(
+      span_of_interest, tokens, word_starts, num_wordpieces);
+  WordpieceSpan max_wordpiece_span = ExpandWindowAndAlign(
+      max_num_wordpieces_in_window, num_wordpieces, wordpiece_span_to_expand);
+  return max_wordpiece_span;
+}
+
+WordpieceSpan FindFullTokensSpanInWindow(
+    const std::vector<int32_t> &word_starts,
+    const WordpieceSpan &wordpiece_span, int max_num_wordpieces,
+    int num_wordpieces, int *first_token_index, int *num_tokens) {
+  int window_first_wordpiece_index = wordpiece_span.begin;
+  *first_token_index = internal::FindFirstFullTokenIndex(
+      word_starts, window_first_wordpiece_index);
+  window_first_wordpiece_index = word_starts[*first_token_index];
+
+  // Need to update the last index in case the first moved backward.
+  int wordpiece_window_end = std::min(
+      wordpiece_span.end, window_first_wordpiece_index + max_num_wordpieces);
+  int last_token_index;
+  last_token_index = internal::FindLastFullTokenIndex(
+      word_starts, num_wordpieces, wordpiece_window_end);
+  wordpiece_window_end = last_token_index == (word_starts.size() - 1)
+                             ? num_wordpieces
+                             : word_starts[last_token_index + 1];
+
+  *num_tokens = last_token_index - *first_token_index + 1;
+  return WordpieceSpan(window_first_wordpiece_index, wordpiece_window_end);
+}
+
+}  // namespace internal
+
+WindowGenerator::WindowGenerator(const std::vector<int32_t> &wordpiece_indices,
+                                 const std::vector<int32_t> &token_starts,
+                                 const std::vector<Token> &tokens,
+                                 int max_num_wordpieces,
+                                 int sliding_window_overlap,
+                                 const CodepointSpan &span_of_interest)
+    : wordpiece_indices_(&wordpiece_indices),
+      token_starts_(&token_starts),
+      tokens_(&tokens),
+      max_num_effective_wordpieces_(max_num_wordpieces),
+      sliding_window_num_wordpieces_overlap_(sliding_window_overlap) {
+  entire_wordpiece_span_ = internal::FindWordpiecesWindowAroundSpan(
+      span_of_interest, tokens, token_starts, wordpiece_indices.size(),
+      max_num_wordpieces);
+  next_wordpiece_span_ = WordpieceSpan(
+      entire_wordpiece_span_.begin,
+      std::min(entire_wordpiece_span_.begin + max_num_effective_wordpieces_,
+               entire_wordpiece_span_.end));
+  previous_wordpiece_span_ = WordpieceSpan(-1, -1);
+}
+
+bool WindowGenerator::Next(VectorSpan<int32_t> *cur_wordpiece_indices,
+                           VectorSpan<int32_t> *cur_token_starts,
+                           VectorSpan<Token> *cur_tokens) {
+  if (Done()) {
+    return false;
+  }
+  // Update the span to cover full tokens.
+  int cur_first_token_index, cur_num_tokens;
+  next_wordpiece_span_ = internal::FindFullTokensSpanInWindow(
+      *token_starts_, next_wordpiece_span_, max_num_effective_wordpieces_,
+      wordpiece_indices_->size(), &cur_first_token_index, &cur_num_tokens);
+  *cur_token_starts = VectorSpan<int32_t>(
+      token_starts_->begin() + cur_first_token_index,
+      token_starts_->begin() + cur_first_token_index + cur_num_tokens);
+  *cur_tokens = VectorSpan<Token>(
+      tokens_->begin() + cur_first_token_index,
+      tokens_->begin() + cur_first_token_index + cur_num_tokens);
+
+  // Handle the edge case where the tokens are composed of many wordpieces and
+  // the window doesn't advance.
+  if (next_wordpiece_span_.begin <= previous_wordpiece_span_.begin ||
+      next_wordpiece_span_.end <= previous_wordpiece_span_.end) {
+    return false;
+  }
+  previous_wordpiece_span_ = next_wordpiece_span_;
+
+  int next_wordpiece_first = std::max(
+      previous_wordpiece_span_.end - sliding_window_num_wordpieces_overlap_,
+      previous_wordpiece_span_.begin + 1);
+  next_wordpiece_span_ = WordpieceSpan(
+      next_wordpiece_first,
+      std::min(next_wordpiece_first + max_num_effective_wordpieces_,
+               entire_wordpiece_span_.end));
+
+  *cur_wordpiece_indices = VectorSpan<int>(
+      wordpiece_indices_->begin() + previous_wordpiece_span_.begin,
+      wordpiece_indices_->begin() + previous_wordpiece_span_.begin +
+          previous_wordpiece_span_.length());
+
+  return true;
+}
+
+bool ConvertTagsToAnnotatedSpans(const VectorSpan<Token> &tokens,
+                                 const std::vector<std::string> &tags,
+                                 const std::vector<std::string> &label_filter,
+                                 bool relaxed_inside_label_matching,
+                                 bool relaxed_label_category_matching,
+                                 float priority_score,
+                                 std::vector<AnnotatedSpan> *results) {
+  AnnotatedSpan current_span;
+  std::string current_tag_type;
+  if (tags.size() > tokens.size()) {
+    return false;
+  }
+  for (int i = 0; i < tags.size(); i++) {
+    if (tags[i].empty()) {
+      return false;
+    }
+
+    std::vector<absl::string_view> tag_parts = absl::StrSplit(tags[i], '-');
+    TC3_CHECK_GT(tag_parts.size(), 0);
+    if (tag_parts[0].size() != 1) {
+      return false;
+    }
+
+    std::string tag_type = "";
+    if (tag_parts.size() > 2) {
+      // Skip if the current label doesn't match the filter.
+      if (!StrIsOneOf(std::string(tag_parts[1]), label_filter)) {
+        current_tag_type = "";
+        current_span = {};
+        continue;
+      }
+
+      // Relax the matching of the label category if specified.
+      tag_type = relaxed_label_category_matching
+                     ? std::string(tag_parts[2])
+                     : absl::StrCat(tag_parts[1], "-", tag_parts[2]);
+    }
+
+    switch (tag_parts[0][0]) {
+      case 'S': {
+        if (tag_parts.size() != 3) {
+          return false;
+        }
+
+        current_span = {};
+        current_tag_type = "";
+        results->push_back(AnnotatedSpan{
+            {tokens[i].start, tokens[i].end},
+            {{/*arg_collection=*/SaftLabelToCollection(tag_parts[2]),
+              /*arg_score=*/1.0, priority_score}}});
+        break;
+      };
+
+      case 'B': {
+        if (tag_parts.size() != 3) {
+          return false;
+        }
+        current_tag_type = tag_type;
+        current_span = {};
+        current_span.classification.push_back(
+            {/*arg_collection=*/SaftLabelToCollection(tag_parts[2]),
+             /*arg_score=*/1.0, priority_score});
+        current_span.span.first = tokens[i].start;
+        break;
+      };
+
+      case 'I': {
+        if (tag_parts.size() != 3) {
+          return false;
+        }
+        if (!relaxed_inside_label_matching && current_tag_type != tag_type) {
+          current_tag_type = "";
+          current_span = {};
+        }
+        break;
+      }
+
+      case 'E': {
+        if (tag_parts.size() != 3) {
+          return false;
+        }
+        if (!current_tag_type.empty() && current_tag_type == tag_type) {
+          current_span.span.second = tokens[i].end;
+          results->push_back(current_span);
+          current_span = {};
+          current_tag_type = "";
+        }
+        break;
+      };
+
+      case 'O': {
+        current_tag_type = "";
+        current_span = {};
+        break;
+      };
+
+      default: {
+        TC3_LOG(ERROR) << "Unrecognized tag: " << tags[i];
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
+using PodNerModel_::CollectionT;
+using PodNerModel_::LabelT;
+using PodNerModel_::Label_::BoiseType;
+using PodNerModel_::Label_::MentionType;
+
+bool ConvertTagsToAnnotatedSpans(const VectorSpan<Token> &tokens,
+                                 const std::vector<LabelT> &labels,
+                                 const std::vector<CollectionT> &collections,
+                                 const std::vector<MentionType> &mention_filter,
+                                 bool relaxed_inside_label_matching,
+                                 bool relaxed_mention_type_matching,
+                                 std::vector<AnnotatedSpan> *results) {
+  if (labels.size() > tokens.size()) {
+    return false;
+  }
+
+  AnnotatedSpan current_span;
+  std::string current_collection_name = "";
+
+  for (int i = 0; i < labels.size(); i++) {
+    const LabelT &label = labels[i];
+
+    if (label.collection_id < 0 || label.collection_id >= collections.size()) {
+      return false;
+    }
+
+    if (std::find(mention_filter.begin(), mention_filter.end(),
+                  label.mention_type) == mention_filter.end()) {
+      // Skip if the current label doesn't match the filter.
+      current_span = {};
+      current_collection_name = "";
+      continue;
+    }
+
+    switch (label.boise_type) {
+      case BoiseType::BoiseType_SINGLE: {
+        current_span = {};
+        current_collection_name = "";
+        results->push_back(AnnotatedSpan{
+            {tokens[i].start, tokens[i].end},
+            {{/*arg_collection=*/collections[label.collection_id].name,
+              /*arg_score=*/1.0,
+              collections[label.collection_id].single_token_priority_score}}});
+        break;
+      };
+
+      case BoiseType::BoiseType_BEGIN: {
+        current_span = {};
+        current_span.classification.push_back(
+            {/*arg_collection=*/collections[label.collection_id].name,
+             /*arg_score=*/1.0,
+             collections[label.collection_id].multi_token_priority_score});
+        current_span.span.first = tokens[i].start;
+        current_collection_name = collections[label.collection_id].name;
+        break;
+      };
+
+      case BoiseType::BoiseType_INTERMEDIATE: {
+        if (current_collection_name.empty() ||
+            (!relaxed_mention_type_matching &&
+             labels[i - 1].mention_type != label.mention_type) ||
+            (!relaxed_inside_label_matching &&
+             labels[i - 1].collection_id != label.collection_id)) {
+          current_span = {};
+          current_collection_name = "";
+        }
+        break;
+      }
+
+      case BoiseType::BoiseType_END: {
+        if (!current_collection_name.empty() &&
+            current_collection_name == collections[label.collection_id].name &&
+            (relaxed_mention_type_matching ||
+             labels[i - 1].mention_type == label.mention_type)) {
+          current_span.span.second = tokens[i].end;
+          results->push_back(current_span);
+        }
+        current_span = {};
+        current_collection_name = "";
+        break;
+      };
+
+      case BoiseType::BoiseType_O: {
+        current_span = {};
+        current_collection_name = "";
+        break;
+      };
+
+      default: {
+        TC3_LOG(ERROR) << "Unrecognized tag: " << labels[i].boise_type;
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
+bool MergeLabelsIntoLeftSequence(
+    const std::vector<PodNerModel_::LabelT> &labels_right,
+    int index_first_right_tag_in_left,
+    std::vector<PodNerModel_::LabelT> *labels_left) {
+  if (index_first_right_tag_in_left > labels_left->size()) {
+    return false;
+  }
+
+  int overlaping_from_left =
+      (labels_left->size() - index_first_right_tag_in_left) / 2;
+
+  labels_left->resize(index_first_right_tag_in_left + labels_right.size());
+  std::copy(labels_right.begin() + overlaping_from_left, labels_right.end(),
+            labels_left->begin() + index_first_right_tag_in_left +
+                overlaping_from_left);
+  return true;
+}
+
+}  // namespace libtextclassifier3
diff --git a/native/annotator/pod_ner/utils.h b/native/annotator/pod_ner/utils.h
new file mode 100644
index 0000000..6c4a902
--- /dev/null
+++ b/native/annotator/pod_ner/utils.h
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_UTILS_H_
+#define LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_UTILS_H_
+
+#include <string>
+#include <vector>
+
+#include "annotator/model_generated.h"
+#include "annotator/types.h"
+#include "absl/strings/string_view.h"
+
+namespace libtextclassifier3 {
+// Converts saft labels like /saft/person to collection name 'person'.
+std::string SaftLabelToCollection(absl::string_view saft_label);
+
+struct WordpieceSpan {
+  // Beginning index is inclusive, end index is exclusive.
+  WordpieceSpan() : begin(0), end(0) {}
+  WordpieceSpan(int begin, int end) : begin(begin), end(end) {}
+  int begin;
+  int end;
+  bool operator==(const WordpieceSpan &other) const {
+    return this->begin == other.begin && this->end == other.end;
+  }
+  int length() { return end - begin; }
+};
+
+namespace internal {
+// Finds the wordpiece window around the given span_of_interest. If the number
+// of wordpieces in this window is smaller than max_num_wordpieces_in_window
+// it is expanded around the span of interest.
+WordpieceSpan FindWordpiecesWindowAroundSpan(
+    const CodepointSpan &span_of_interest, const std::vector<Token> &tokens,
+    const std::vector<int32_t> &word_starts, int num_wordpieces,
+    int max_num_wordpieces_in_window);
+// Expands the given wordpiece window to be the maximal possible size while
+// making sure it includes only full tokens.
+WordpieceSpan ExpandWindowAndAlign(int max_num_wordpieces_in_window,
+                                   int num_wordpieces,
+                                   WordpieceSpan wordpiece_span_to_expand);
+// Returns the index of the last token which ends before wordpiece_end.
+int FindLastFullTokenIndex(const std::vector<int32_t> &word_starts,
+                           int num_wordpieces, int wordpiece_end);
+// Returns the index of the token which includes first_wordpiece_index.
+int FindFirstFullTokenIndex(const std::vector<int32_t> &word_starts,
+                            int first_wordpiece_index);
+// Given wordpiece_span, and max_num_wordpieces, finds:
+//   1. The first token which includes wordpiece_span.begin.
+//   2. The length of tokens sequence which starts from this token and:
+//      a. Its last token's last wordpiece index ends before wordpiece_span.end.
+//      b. Its overall number of wordpieces is at most max_num_wordpieces.
+// Returns the updated wordpiece_span: begin and end wordpieces of this token
+// sequence.
+WordpieceSpan FindFullTokensSpanInWindow(
+    const std::vector<int32_t> &word_starts,
+    const WordpieceSpan &wordpiece_span, int max_num_wordpieces,
+    int num_wordpieces, int *first_token_index, int *num_tokens);
+
+}  // namespace internal
+// Converts sequence of IOB tags to AnnotatedSpans. Ignores illegal sequences.
+// Setting label_filter can also help ignore certain label tags like "NAM" or
+// "NOM".
+// The inside tag can be ignored when setting relaxed_inside_label_matching,
+// e.g. B-NAM-location, I-NAM-other, E-NAM-location would be considered a valid
+// sequence.
+// The label category matching can be ignored when setting
+// relaxed_label_category_matching. The matching will only operate at the entity
+// level, e.g. B-NAM-location, E-NOM-location would be considered a valid
+// sequence.
+bool ConvertTagsToAnnotatedSpans(const VectorSpan<Token> &tokens,
+                                 const std::vector<std::string> &tags,
+                                 const std::vector<std::string> &label_filter,
+                                 bool relaxed_inside_label_matching,
+                                 bool relaxed_label_category_matching,
+                                 float priority_score,
+                                 std::vector<AnnotatedSpan> *results);
+
+// Like the previous function but instead of getting the tags as strings
+// the input is PodNerModel_::LabelT along with the collections vector which
+// holds the collection name and priorities. e.g. if a tag was "B-NAM-location"
+// the priority_score was 1.0 it would be Label(BoiseType_BEGIN,
+// MentionType_NAM, 1) and collections={{"xxx", 1., 1.},
+// {"location", 1., 1.}, {"yyy", 1., 1.}, ...}.
+bool ConvertTagsToAnnotatedSpans(
+    const VectorSpan<Token> &tokens,
+    const std::vector<PodNerModel_::LabelT> &labels,
+    const std::vector<PodNerModel_::CollectionT> &collections,
+    const std::vector<PodNerModel_::Label_::MentionType> &mention_filter,
+    bool relaxed_inside_label_matching, bool relaxed_mention_type_matching,
+    std::vector<AnnotatedSpan> *results);
+
+// Merges two overlapping sequences of labels; the result is placed into the
+// left sequence. In the overlapping part, labels are taken from the left
+// sequence for the first half and from the right sequence for the second half.
+bool MergeLabelsIntoLeftSequence(
+    const std::vector<PodNerModel_::LabelT> &labels_right,
+    int index_first_right_tag_in_left,
+    std::vector<PodNerModel_::LabelT> *labels_left);
+
+// This class is used to slide over {wordpiece_indices, token_starts, tokens} in
+// windows of at most max_num_wordpieces while assuring that each window
+// contains only full tokens.
+class WindowGenerator {
+ public:
+  WindowGenerator(const std::vector<int32_t> &wordpiece_indices,
+                  const std::vector<int32_t> &token_starts,
+                  const std::vector<Token> &tokens, int max_num_wordpieces,
+                  int sliding_window_overlap,
+                  const CodepointSpan &span_of_interest);
+
+  bool Next(VectorSpan<int32_t> *cur_wordpiece_indices,
+            VectorSpan<int32_t> *cur_token_starts,
+            VectorSpan<Token> *cur_tokens);
+
+  bool Done() const {
+    return previous_wordpiece_span_.end >= entire_wordpiece_span_.end;
+  }
+
+ private:
+  const std::vector<int32_t> *wordpiece_indices_;
+  const std::vector<int32_t> *token_starts_;
+  const std::vector<Token> *tokens_;
+  int max_num_effective_wordpieces_;
+  int sliding_window_num_wordpieces_overlap_;
+  WordpieceSpan entire_wordpiece_span_;
+  WordpieceSpan next_wordpiece_span_;
+  WordpieceSpan previous_wordpiece_span_;
+};
+
+}  // namespace libtextclassifier3
+
+#endif  // LIBTEXTCLASSIFIER_ANNOTATOR_POD_NER_UTILS_H_
diff --git a/native/annotator/pod_ner/utils_test.cc b/native/annotator/pod_ner/utils_test.cc
new file mode 100644
index 0000000..fdc82f2
--- /dev/null
+++ b/native/annotator/pod_ner/utils_test.cc
@@ -0,0 +1,905 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "annotator/pod_ner/utils.h"
+
+#include <iterator>
+
+#include "annotator/model_generated.h"
+#include "annotator/types.h"
+#include "utils/tokenizer-utils.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/container/flat_hash_map.h"
+#include "absl/strings/str_split.h"
+
+namespace libtextclassifier3 {
+namespace {
+
+using ::testing::IsEmpty;
+using ::testing::Not;
+
+using PodNerModel_::CollectionT;
+using PodNerModel_::LabelT;
+using PodNerModel_::Label_::BoiseType;
+using PodNerModel_::Label_::BoiseType_BEGIN;
+using PodNerModel_::Label_::BoiseType_END;
+using PodNerModel_::Label_::BoiseType_INTERMEDIATE;
+using PodNerModel_::Label_::BoiseType_O;
+using PodNerModel_::Label_::BoiseType_SINGLE;
+using PodNerModel_::Label_::MentionType;
+using PodNerModel_::Label_::MentionType_NAM;
+using PodNerModel_::Label_::MentionType_NOM;
+using PodNerModel_::Label_::MentionType_UNDEFINED;
+
+constexpr float kPriorityScore = 0.;
+const std::vector<std::string>& kCollectionNames =
+    *new std::vector<std::string>{"undefined",    "location", "person", "art",
+                                  "organization", "entitiy",  "xxx"};
+const auto& kStringToBoiseType = *new absl::flat_hash_map<
+    absl::string_view, libtextclassifier3::PodNerModel_::Label_::BoiseType>({
+    {"B", libtextclassifier3::PodNerModel_::Label_::BoiseType_BEGIN},
+    {"O", libtextclassifier3::PodNerModel_::Label_::BoiseType_O},
+    {"I", libtextclassifier3::PodNerModel_::Label_::BoiseType_INTERMEDIATE},
+    {"S", libtextclassifier3::PodNerModel_::Label_::BoiseType_SINGLE},
+    {"E", libtextclassifier3::PodNerModel_::Label_::BoiseType_END},
+});
+const auto& kStringToMentionType = *new absl::flat_hash_map<
+    absl::string_view, libtextclassifier3::PodNerModel_::Label_::MentionType>(
+    {{"NAM", libtextclassifier3::PodNerModel_::Label_::MentionType_NAM},
+     {"NOM", libtextclassifier3::PodNerModel_::Label_::MentionType_NOM}});
+LabelT CreateLabel(BoiseType boise_type, MentionType mention_type,
+                   int collection_id) {
+  LabelT label;
+  label.boise_type = boise_type;
+  label.mention_type = mention_type;
+  label.collection_id = collection_id;
+  return label;
+}
+std::vector<PodNerModel_::LabelT> TagsToLabels(
+    const std::vector<std::string>& tags) {
+  std::vector<PodNerModel_::LabelT> labels;
+  for (const auto& tag : tags) {
+    if (tag == "O") {
+      labels.emplace_back(CreateLabel(BoiseType_O, MentionType_UNDEFINED, 0));
+    } else {
+      std::vector<absl::string_view> tag_parts = absl::StrSplit(tag, '-');
+      labels.emplace_back(CreateLabel(
+          kStringToBoiseType.at(tag_parts[0]),
+          kStringToMentionType.at(tag_parts[1]),
+          std::distance(
+              kCollectionNames.begin(),
+              std::find(kCollectionNames.begin(), kCollectionNames.end(),
+                        std::string(tag_parts[2].substr(
+                            tag_parts[2].rfind('/') + 1))))));
+    }
+  }
+  return labels;
+}
+
+std::vector<CollectionT> GetCollections() {
+  std::vector<CollectionT> collections;
+  for (const std::string& collection_name : kCollectionNames) {
+    CollectionT collection;
+    collection.name = collection_name;
+    collection.single_token_priority_score = kPriorityScore;
+    collection.multi_token_priority_score = kPriorityScore;
+    collections.emplace_back(collection);
+  }
+  return collections;
+}
+
+class ConvertTagsToAnnotatedSpansTest : public testing::TestWithParam<bool> {};
+INSTANTIATE_TEST_SUITE_P(TagsAndLabelsTest, ConvertTagsToAnnotatedSpansTest,
+                         testing::Values(true, false));
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansHandlesBIESequence) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O",
+                                   "O",
+                                   "O",
+                                   "B-NAM-/saft/location",
+                                   "I-NAM-/saft/location",
+                                   "E-NAM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+
+  EXPECT_EQ(annotations.size(), 1);
+  EXPECT_EQ(annotations[0].span, CodepointSpan(10, 23));
+  EXPECT_EQ(annotations[0].classification[0].collection, "location");
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansHandlesSSequence) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "His father was it.";
+  std::vector<std::string> tags = {"O", "S-NAM-/saft/person", "O", "O"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+
+  EXPECT_EQ(annotations.size(), 1);
+  EXPECT_EQ(annotations[0].span, CodepointSpan(4, 10));
+  EXPECT_EQ(annotations[0].classification[0].collection, "person");
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansHandlesMultiple) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text =
+      "Jaromir Jagr, Barak Obama and I met in Google New York City";
+  std::vector<std::string> tags = {"B-NAM-/saft/person",
+                                   "E-NAM-/saft/person",
+                                   "B-NOM-/saft/person",
+                                   "E-NOM-/saft/person",
+                                   "O",
+                                   "O",
+                                   "O",
+                                   "O",
+                                   "S-NAM-/saft/organization",
+                                   "B-NAM-/saft/location",
+                                   "I-NAM-/saft/location",
+                                   "E-NAM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+
+    ASSERT_EQ(annotations.size(), 4);
+    EXPECT_EQ(annotations[0].span, CodepointSpan(0, 13));
+    ASSERT_THAT(annotations[0].classification, Not(IsEmpty()));
+    EXPECT_EQ(annotations[0].classification[0].collection, "person");
+    EXPECT_EQ(annotations[1].span, CodepointSpan(14, 25));
+    ASSERT_THAT(annotations[1].classification, Not(IsEmpty()));
+    EXPECT_EQ(annotations[1].classification[0].collection, "person");
+    EXPECT_EQ(annotations[2].span, CodepointSpan(39, 45));
+    ASSERT_THAT(annotations[2].classification, Not(IsEmpty()));
+    EXPECT_EQ(annotations[2].classification[0].collection, "organization");
+    EXPECT_EQ(annotations[3].span, CodepointSpan(46, 59));
+    ASSERT_THAT(annotations[3].classification, Not(IsEmpty()));
+    EXPECT_EQ(annotations[3].classification[0].collection, "location");
+  }
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansHandlesMultipleFirstTokenNotFirst) {
+  std::vector<AnnotatedSpan> annotations;
+  std::vector<Token> original_tokens = TokenizeOnSpace(
+      "Jaromir Jagr, Barak Obama and I met in Google New York City");
+  std::vector<std::string> tags = {"B-NOM-/saft/person",
+                                   "E-NOM-/saft/person",
+                                   "O",
+                                   "O",
+                                   "O",
+                                   "O",
+                                   "S-NAM-/saft/organization",
+                                   "B-NAM-/saft/location",
+                                   "I-NAM-/saft/location",
+                                   "E-NAM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(original_tokens.begin() + 2, original_tokens.end()),
+        tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(original_tokens.begin() + 2, original_tokens.end()),
+        TagsToLabels(tags), GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+
+  ASSERT_EQ(annotations.size(), 3);
+  EXPECT_EQ(annotations[0].span, CodepointSpan(14, 25));
+  ASSERT_THAT(annotations[0].classification, Not(IsEmpty()));
+  EXPECT_EQ(annotations[0].classification[0].collection, "person");
+  EXPECT_EQ(annotations[1].span, CodepointSpan(39, 45));
+  ASSERT_THAT(annotations[1].classification, Not(IsEmpty()));
+  EXPECT_EQ(annotations[1].classification[0].collection, "organization");
+  EXPECT_EQ(annotations[2].span, CodepointSpan(46, 59));
+  ASSERT_THAT(annotations[2].classification, Not(IsEmpty()));
+  EXPECT_EQ(annotations[2].classification[0].collection, "location");
+}
+
+TEST(PodNerUtilsTest, ConvertTagsToAnnotatedSpansInvalidCollection) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O", "O", "S-NAM-/saft/invalid_collection"};
+
+  ASSERT_FALSE(ConvertTagsToAnnotatedSpans(
+      VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+      GetCollections(),
+      /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+      /*relaxed_inside_label_matching=*/false,
+      /*relaxed_mention_type_matching=*/false, &annotations));
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansIgnoresInconsistentStart) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O",
+                                   "O",
+                                   "O",
+                                   "B-NAM-/saft/xxx",
+                                   "I-NAM-/saft/location",
+                                   "E-NAM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+  EXPECT_THAT(annotations, IsEmpty());
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansIgnoresInconsistentLabelTypeStart) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O",
+                                   "O",
+                                   "O",
+                                   "B-NOM-/saft/location",
+                                   "I-NAM-/saft/location",
+                                   "E-NAM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+
+  EXPECT_THAT(annotations, IsEmpty());
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansIgnoresInconsistentInside) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O",
+                                   "O",
+                                   "O",
+                                   "B-NAM-/saft/location",
+                                   "I-NAM-/saft/xxx",
+                                   "E-NAM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+
+  EXPECT_THAT(annotations, IsEmpty());
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansIgnoresInconsistentLabelTypeInside) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O",
+                                   "O",
+                                   "O",
+                                   "B-NAM-/saft/location",
+                                   "I-NOM-/saft/location",
+                                   "E-NAM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+  EXPECT_THAT(annotations, IsEmpty());
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansHandlesInconsistentInside) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O",
+                                   "O",
+                                   "O",
+                                   "B-NAM-/saft/location",
+                                   "I-NAM-/saft/xxx",
+                                   "E-NAM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/true,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/true,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+
+  EXPECT_EQ(annotations.size(), 1);
+  EXPECT_EQ(annotations[0].span, CodepointSpan(10, 23));
+  EXPECT_EQ(annotations[0].classification[0].collection, "location");
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansIgnoresInconsistentEnd) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O",
+                                   "O",
+                                   "O",
+                                   "B-NAM-/saft/location",
+                                   "I-NAM-/saft/location",
+                                   "E-NAM-/saft/xxx"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+
+  EXPECT_THAT(annotations, IsEmpty());
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansIgnoresInconsistentLabelTypeEnd) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O",
+                                   "O",
+                                   "O",
+                                   "B-NAM-/saft/location",
+                                   "I-NAM-/saft/location",
+                                   "E-NOM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+
+  EXPECT_THAT(annotations, IsEmpty());
+}
+
+TEST_P(
+    ConvertTagsToAnnotatedSpansTest,
+    ConvertTagsToAnnotatedSpansHandlesInconsistentLabelTypeWhenEntityMatches) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O",
+                                   "O",
+                                   "O",
+                                   "B-NOM-/saft/location",
+                                   "I-NOM-/saft/location",
+                                   "E-NAM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NAM", "NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/true, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NAM, MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/true, &annotations));
+  }
+
+  EXPECT_EQ(annotations.size(), 1);
+  EXPECT_EQ(annotations[0].span, CodepointSpan(10, 23));
+  EXPECT_EQ(annotations[0].classification[0].collection, "location");
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansIgnoresFilteredLabel) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O",
+                                   "O",
+                                   "O",
+                                   "B-NAM-/saft/location",
+                                   "I-NAM-/saft/location",
+                                   "E-NAM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{"NOM"},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{MentionType_NOM},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+
+  EXPECT_THAT(annotations, IsEmpty());
+}
+
+TEST_P(ConvertTagsToAnnotatedSpansTest,
+       ConvertTagsToAnnotatedSpansWithEmptyLabelFilterIgnoresAll) {
+  std::vector<AnnotatedSpan> annotations;
+  std::string text = "We met in New York City";
+  std::vector<std::string> tags = {"O",
+                                   "O",
+                                   "O",
+                                   "B-NOM-/saft/location",
+                                   "I-NOM-/saft/location",
+                                   "E-NOM-/saft/location"};
+  if (GetParam()) {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), tags,
+        /*label_filter=*/{},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_label_category_matching=*/false, kPriorityScore,
+        &annotations));
+  } else {
+    ASSERT_TRUE(ConvertTagsToAnnotatedSpans(
+        VectorSpan<Token>(TokenizeOnSpace(text)), TagsToLabels(tags),
+        GetCollections(),
+        /*mention_filter=*/{},
+        /*relaxed_inside_label_matching=*/false,
+        /*relaxed_mention_type_matching=*/false, &annotations));
+  }
+
+  EXPECT_THAT(annotations, IsEmpty());
+}
+
+TEST(PodNerUtilsTest, MergeLabelsIntoLeftSequence) {
+  std::vector<PodNerModel_::LabelT> original_labels_left;
+  original_labels_left.emplace_back(
+      CreateLabel(BoiseType_O, MentionType_UNDEFINED, 0));
+  original_labels_left.emplace_back(
+      CreateLabel(BoiseType_O, MentionType_UNDEFINED, 0));
+  original_labels_left.emplace_back(
+      CreateLabel(BoiseType_O, MentionType_UNDEFINED, 0));
+  original_labels_left.emplace_back(
+      CreateLabel(BoiseType_SINGLE, MentionType_NAM, 1));
+  original_labels_left.emplace_back(
+      CreateLabel(BoiseType_O, MentionType_UNDEFINED, 0));
+  original_labels_left.emplace_back(
+      CreateLabel(BoiseType_O, MentionType_UNDEFINED, 0));
+  original_labels_left.emplace_back(
+      CreateLabel(BoiseType_SINGLE, MentionType_NAM, 2));
+
+  std::vector<PodNerModel_::LabelT> labels_right;
+  labels_right.emplace_back(
+      CreateLabel(BoiseType_BEGIN, MentionType_UNDEFINED, 3));
+  labels_right.emplace_back(CreateLabel(BoiseType_O, MentionType_UNDEFINED, 0));
+  labels_right.emplace_back(CreateLabel(BoiseType_O, MentionType_UNDEFINED, 0));
+  labels_right.emplace_back(CreateLabel(BoiseType_BEGIN, MentionType_NAM, 4));
+  labels_right.emplace_back(
+      CreateLabel(BoiseType_INTERMEDIATE, MentionType_UNDEFINED, 4));
+  labels_right.emplace_back(
+      CreateLabel(BoiseType_END, MentionType_UNDEFINED, 4));
+  std::vector<PodNerModel_::LabelT> labels_left = original_labels_left;
+
+  ASSERT_TRUE(MergeLabelsIntoLeftSequence(labels_right,
+                                          /*index_first_right_tag_in_left=*/3,
+                                          &labels_left));
+  EXPECT_EQ(labels_left.size(), 9);
+  EXPECT_EQ(labels_left[0].collection_id, 0);
+  EXPECT_EQ(labels_left[1].collection_id, 0);
+  EXPECT_EQ(labels_left[2].collection_id, 0);
+  EXPECT_EQ(labels_left[3].collection_id, 1);
+  EXPECT_EQ(labels_left[4].collection_id, 0);
+  EXPECT_EQ(labels_left[5].collection_id, 0);
+  EXPECT_EQ(labels_left[6].collection_id, 4);
+  EXPECT_EQ(labels_left[7].collection_id, 4);
+  EXPECT_EQ(labels_left[8].collection_id, 4);
+
+  labels_left = original_labels_left;
+  ASSERT_TRUE(MergeLabelsIntoLeftSequence(labels_right,
+                                          /*index_first_right_tag_in_left=*/2,
+                                          &labels_left));
+  EXPECT_EQ(labels_left.size(), 8);
+  EXPECT_EQ(labels_left[0].collection_id, 0);
+  EXPECT_EQ(labels_left[1].collection_id, 0);
+  EXPECT_EQ(labels_left[2].collection_id, 0);
+  EXPECT_EQ(labels_left[3].collection_id, 1);
+  EXPECT_EQ(labels_left[4].collection_id, 0);
+  EXPECT_EQ(labels_left[5].collection_id, 4);
+  EXPECT_EQ(labels_left[6].collection_id, 4);
+  EXPECT_EQ(labels_left[7].collection_id, 4);
+}
+
+TEST(PodNerUtilsTest, FindWordpiecesWindowAroundSpanAllWordpices) {
+  std::vector<Token> tokens{{"a", 0, 1},    {"b", 2, 3},     {"c", 4, 5},
+                            {"d", 6, 7},    {"e", 8, 9},     {"f", 10, 11},
+                            {"my", 12, 14}, {"name", 15, 19}};
+  std::vector<int32_t> word_starts{0, 2, 3, 5, 6, 7, 10, 11};
+
+  WordpieceSpan wordpieceSpan = internal::FindWordpiecesWindowAroundSpan(
+      {2, 3}, tokens, word_starts,
+      /*num_wordpieces=*/12,
+      /*max_num_wordpieces_in_window=*/15);
+  EXPECT_EQ(wordpieceSpan, WordpieceSpan(0, 12));
+}
+
+TEST(PodNerUtilsTest, FindWordpiecesWindowAroundSpanInMiddle) {
+  std::vector<Token> tokens{{"a", 0, 1},    {"b", 2, 3},     {"c", 4, 5},
+                            {"d", 6, 7},    {"e", 8, 9},     {"f", 10, 11},
+                            {"my", 12, 14}, {"name", 15, 19}};
+  std::vector<int32_t> word_starts{0, 2, 3, 5, 6, 7, 10, 11};
+
+  WordpieceSpan wordpieceSpan = internal::FindWordpiecesWindowAroundSpan(
+      {6, 7}, tokens, word_starts,
+      /*num_wordpieces=*/12,
+      /*max_num_wordpieces_in_window=*/5);
+  EXPECT_EQ(wordpieceSpan, WordpieceSpan(3, 8));
+
+  wordpieceSpan = internal::FindWordpiecesWindowAroundSpan(
+      {6, 7}, tokens, word_starts,
+      /*num_wordpieces=*/12,
+      /*max_num_wordpieces_in_window=*/6);
+  EXPECT_EQ(wordpieceSpan, WordpieceSpan(3, 9));
+
+  wordpieceSpan = internal::FindWordpiecesWindowAroundSpan(
+      {12, 14}, tokens, word_starts,
+      /*num_wordpieces=*/12,
+      /*max_num_wordpieces_in_window=*/3);
+  EXPECT_EQ(wordpieceSpan, WordpieceSpan(9, 12));
+}
+
+TEST(PodNerUtilsTest, FindWordpiecesWindowAroundSpanCloseToStart) {
+  std::vector<Token> tokens{{"a", 0, 1},    {"b", 2, 3},     {"c", 4, 5},
+                            {"d", 6, 7},    {"e", 8, 9},     {"f", 10, 11},
+                            {"my", 12, 14}, {"name", 15, 19}};
+  std::vector<int32_t> word_starts{0, 2, 3, 5, 6, 7, 10, 11};
+
+  WordpieceSpan wordpieceSpan = internal::FindWordpiecesWindowAroundSpan(
+      {2, 3}, tokens, word_starts,
+      /*num_wordpieces=*/12,
+      /*max_num_wordpieces_in_window=*/7);
+  EXPECT_EQ(wordpieceSpan, WordpieceSpan(0, 7));
+}
+
+TEST(PodNerUtilsTest, FindWordpiecesWindowAroundSpanCloseToEnd) {
+  std::vector<Token> tokens{{"a", 0, 1},    {"b", 2, 3},     {"c", 4, 5},
+                            {"d", 6, 7},    {"e", 8, 9},     {"f", 10, 11},
+                            {"my", 12, 14}, {"name", 15, 19}};
+  std::vector<int32_t> word_starts{0, 2, 3, 5, 6, 7, 10, 11};
+
+  WordpieceSpan wordpieceSpan = internal::FindWordpiecesWindowAroundSpan(
+      {15, 19}, tokens, word_starts,
+      /*num_wordpieces=*/12,
+      /*max_num_wordpieces_in_window=*/7);
+  EXPECT_EQ(wordpieceSpan, WordpieceSpan(5, 12));
+}
+
+TEST(PodNerUtilsTest, FindWordpiecesWindowAroundSpanBigSpan) {
+  std::vector<Token> tokens{{"a", 0, 1},    {"b", 2, 3},     {"c", 4, 5},
+                            {"d", 6, 7},    {"e", 8, 9},     {"f", 10, 11},
+                            {"my", 12, 14}, {"name", 15, 19}};
+  std::vector<int32_t> word_starts{0, 2, 3, 5, 6, 7, 10, 11};
+
+  WordpieceSpan wordpieceSpan = internal::FindWordpiecesWindowAroundSpan(
+      {0, 19}, tokens, word_starts,
+      /*num_wordpieces=*/12,
+      /*max_num_wordpieces_in_window=*/5);
+  EXPECT_EQ(wordpieceSpan, WordpieceSpan(0, 12));
+}
+
+TEST(PodNerUtilsTest, FindFullTokensSpanInWindow) {
+  std::vector<int32_t> word_starts{0, 2, 3, 5, 6, 7, 10, 11};
+  int first_token_index, num_tokens;
+  WordpieceSpan updated_wordpiece_span = internal::FindFullTokensSpanInWindow(
+      word_starts, /*wordpiece_span=*/{0, 6},
+      /*max_num_wordpieces=*/6, /*num_wordpieces=*/12, &first_token_index,
+      &num_tokens);
+  EXPECT_EQ(updated_wordpiece_span, WordpieceSpan(0, 6));
+  EXPECT_EQ(first_token_index, 0);
+  EXPECT_EQ(num_tokens, 4);
+
+  updated_wordpiece_span = internal::FindFullTokensSpanInWindow(
+      word_starts, /*wordpiece_span=*/{2, 6},
+      /*max_num_wordpieces=*/6, /*num_wordpieces=*/12, &first_token_index,
+      &num_tokens);
+  EXPECT_EQ(updated_wordpiece_span, WordpieceSpan(2, 6));
+  EXPECT_EQ(first_token_index, 1);
+  EXPECT_EQ(num_tokens, 3);
+}
+
+TEST(PodNerUtilsTest, FindFullTokensSpanInWindowStartInMiddleOfToken) {
+  std::vector<int32_t> word_starts{0, 2, 3, 5, 6, 7, 10, 11};
+  int first_token_index, num_tokens;
+  WordpieceSpan updated_wordpiece_span = internal::FindFullTokensSpanInWindow(
+      word_starts, /*wordpiece_span=*/{1, 6},
+      /*max_num_wordpieces=*/6, /*num_wordpieces=*/12, &first_token_index,
+      &num_tokens);
+  EXPECT_EQ(updated_wordpiece_span, WordpieceSpan(0, 6));
+  EXPECT_EQ(first_token_index, 0);
+  EXPECT_EQ(num_tokens, 4);
+}
+
+TEST(PodNerUtilsTest, FindFullTokensSpanInWindowEndsInMiddleOfToken) {
+  std::vector<int32_t> word_starts{0, 2, 3, 5, 6, 7, 10, 11};
+  int first_token_index, num_tokens;
+  WordpieceSpan updated_wordpiece_span = internal::FindFullTokensSpanInWindow(
+      word_starts, /*wordpiece_span=*/{1, 9},
+      /*max_num_wordpieces=*/6, /*num_wordpieces=*/12, &first_token_index,
+      &num_tokens);
+  EXPECT_EQ(updated_wordpiece_span, WordpieceSpan(0, 6));
+  EXPECT_EQ(first_token_index, 0);
+  EXPECT_EQ(num_tokens, 4);
+}
+TEST(PodNerUtilsTest, FindFirstFullTokenIndexSizeOne) {
+  std::vector<int32_t> word_starts{1, 2, 3, 5, 6, 7, 10, 11};
+  int index_first_full_token = internal::FindFirstFullTokenIndex(
+      word_starts, /*first_wordpiece_index=*/2);
+  EXPECT_EQ(index_first_full_token, 1);
+}
+
+TEST(PodNerUtilsTest, FindFirstFullTokenIndexFirst) {
+  std::vector<int32_t> word_starts{1, 2, 3, 5, 6, 7, 10, 11};
+  int index_first_full_token = internal::FindFirstFullTokenIndex(
+      word_starts, /*first_wordpiece_index=*/0);
+  EXPECT_EQ(index_first_full_token, 0);
+}
+
+TEST(PodNerUtilsTest, FindFirstFullTokenIndexSizeGreaterThanOne) {
+  std::vector<int32_t> word_starts{1, 2, 3, 5, 6, 7, 10, 11};
+  int index_first_full_token = internal::FindFirstFullTokenIndex(
+      word_starts, /*first_wordpiece_index=*/4);
+  EXPECT_EQ(index_first_full_token, 2);
+}
+
+TEST(PodNerUtilsTest, FindLastFullTokenIndexSizeOne) {
+  std::vector<int32_t> word_starts{1, 2, 3, 5, 6, 7, 10, 11};
+  int index_last_full_token = internal::FindLastFullTokenIndex(
+      word_starts, /*num_wordpieces=*/12, /*wordpiece_end=*/3);
+  EXPECT_EQ(index_last_full_token, 1);
+}
+
+TEST(PodNerUtilsTest, FindLastFullTokenIndexSizeGreaterThanOne) {
+  std::vector<int32_t> word_starts{1, 3, 4, 6, 8, 9};
+  int index_last_full_token = internal::FindLastFullTokenIndex(
+      word_starts, /*num_wordpieces=*/10, /*wordpiece_end=*/6);
+  EXPECT_EQ(index_last_full_token, 2);
+
+  index_last_full_token = internal::FindLastFullTokenIndex(
+      word_starts, /*num_wordpieces=*/10, /*wordpiece_end=*/7);
+  EXPECT_EQ(index_last_full_token, 2);
+
+  index_last_full_token = internal::FindLastFullTokenIndex(
+      word_starts, /*num_wordpieces=*/10, /*wordpiece_end=*/5);
+  EXPECT_EQ(index_last_full_token, 1);
+}
+
+TEST(PodNerUtilsTest, FindLastFullTokenIndexLast) {
+  std::vector<int32_t> word_starts{1, 2, 3, 5, 6, 7, 10, 11};
+  int index_last_full_token = internal::FindLastFullTokenIndex(
+      word_starts, /*num_wordpieces=*/12, /*wordpiece_end=*/12);
+  EXPECT_EQ(index_last_full_token, 7);
+
+  index_last_full_token = internal::FindLastFullTokenIndex(
+      word_starts, /*num_wordpieces=*/14, /*wordpiece_end=*/14);
+  EXPECT_EQ(index_last_full_token, 7);
+}
+
+TEST(PodNerUtilsTest, FindLastFullTokenIndexBeforeLast) {
+  std::vector<int32_t> word_starts{1, 2, 3, 5, 6, 7, 10, 11};
+  int index_last_full_token = internal::FindLastFullTokenIndex(
+      word_starts, /*num_wordpieces=*/15, /*wordpiece_end=*/12);
+  EXPECT_EQ(index_last_full_token, 6);
+}
+
+TEST(PodNerUtilsTest, ExpandWindowAndAlignSequenceSmallerThanMax) {
+  WordpieceSpan maxWordpieceSpan = internal::ExpandWindowAndAlign(
+      /*max_num_wordpieces_in_window=*/10, /*num_wordpieces=*/8,
+      /*wordpiece_span_to_expand=*/{2, 5});
+  EXPECT_EQ(maxWordpieceSpan, WordpieceSpan(0, 8));
+}
+
+TEST(PodNerUtilsTest, ExpandWindowAndAlignWindowLengthGreaterThanMax) {
+  WordpieceSpan maxWordpieceSpan = internal::ExpandWindowAndAlign(
+      /*max_num_wordpieces_in_window=*/10, /*num_wordpieces=*/100,
+      /*wordpiece_span_to_expand=*/{2, 51});
+  EXPECT_EQ(maxWordpieceSpan, WordpieceSpan(2, 51));
+}
+
+TEST(PodNerUtilsTest, ExpandWindowAndAlignFirstIndexCloseToStart) {
+  WordpieceSpan maxWordpieceSpan = internal::ExpandWindowAndAlign(
+      /*max_num_wordpieces_in_window=*/10, /*num_wordpieces=*/20,
+      /*wordpiece_span_to_expand=*/{2, 4});
+  EXPECT_EQ(maxWordpieceSpan, WordpieceSpan(0, 10));
+}
+
+TEST(PodNerUtilsTest, ExpandWindowAndAlignFirstIndexCloseToEnd) {
+  WordpieceSpan maxWordpieceSpan = internal::ExpandWindowAndAlign(
+      /*max_num_wordpieces_in_window=*/10, /*num_wordpieces=*/20,
+      /*wordpiece_span_to_expand=*/{18, 20});
+  EXPECT_EQ(maxWordpieceSpan, WordpieceSpan(10, 20));
+}
+
+TEST(PodNerUtilsTest, ExpandWindowAndAlignFirstIndexInTheMiddle) {
+  int window_first_wordpiece_index = 10;
+  int window_last_wordpiece_index = 11;
+  WordpieceSpan maxWordpieceSpan = internal::ExpandWindowAndAlign(
+      /*max_num_wordpieces_in_window=*/10, /*num_wordpieces=*/20,
+      /*wordpiece_span_to_expand=*/{10, 12});
+  EXPECT_EQ(maxWordpieceSpan, WordpieceSpan(6, 16));
+
+  window_first_wordpiece_index = 10;
+  window_last_wordpiece_index = 12;
+  maxWordpieceSpan = internal::ExpandWindowAndAlign(
+      /*max_num_wordpieces_in_window=*/10, /*num_wordpieces=*/20,
+      /*wordpiece_span_to_expand=*/{10, 13});
+  EXPECT_EQ(maxWordpieceSpan, WordpieceSpan(7, 17));
+}
+
+TEST(PodNerUtilsTest, WindowGenerator) {
+  std::vector<int32_t> wordpiece_indices = {10, 20, 30, 40, 50, 60, 70, 80};
+  std::vector<Token> tokens{{"a", 0, 1}, {"b", 2, 3}, {"c", 4, 5},
+                            {"d", 6, 7}, {"e", 8, 9}, {"f", 10, 11}};
+  std::vector<int32_t> token_starts{0, 2, 3, 5, 6, 7};
+  WindowGenerator window_generator(wordpiece_indices, token_starts, tokens,
+                                   /*max_num_wordpieces=*/4,
+                                   /*sliding_window_overlap=*/1,
+                                   /*span_of_interest=*/{0, 12});
+  VectorSpan<int32_t> cur_wordpiece_indices;
+  VectorSpan<int32_t> cur_token_starts;
+  VectorSpan<Token> cur_tokens;
+  ASSERT_TRUE(window_generator.Next(&cur_wordpiece_indices, &cur_token_starts,
+                                    &cur_tokens));
+  ASSERT_FALSE(window_generator.Done());
+  ASSERT_EQ(cur_wordpiece_indices.size(), 3);
+  for (int i = 0; i < 3; i++) {
+    ASSERT_EQ(cur_wordpiece_indices[i], wordpiece_indices[i]);
+  }
+  ASSERT_EQ(cur_token_starts.size(), 2);
+  ASSERT_EQ(cur_tokens.size(), 2);
+  for (int i = 0; i < cur_tokens.size(); i++) {
+    ASSERT_EQ(cur_token_starts[i], token_starts[i]);
+    ASSERT_EQ(cur_tokens[i], tokens[i]);
+  }
+
+  ASSERT_TRUE(window_generator.Next(&cur_wordpiece_indices, &cur_token_starts,
+                                    &cur_tokens));
+  ASSERT_FALSE(window_generator.Done());
+  ASSERT_EQ(cur_wordpiece_indices.size(), 4);
+  for (int i = 0; i < cur_wordpiece_indices.size(); i++) {
+    ASSERT_EQ(cur_wordpiece_indices[i], wordpiece_indices[i + 2]);
+  }
+  ASSERT_EQ(cur_token_starts.size(), 3);
+  ASSERT_EQ(cur_tokens.size(), 3);
+  for (int i = 0; i < cur_tokens.size(); i++) {
+    ASSERT_EQ(cur_token_starts[i], token_starts[i + 1]);
+    ASSERT_EQ(cur_tokens[i], tokens[i + 1]);
+  }
+
+  ASSERT_TRUE(window_generator.Next(&cur_wordpiece_indices, &cur_token_starts,
+                                    &cur_tokens));
+  ASSERT_TRUE(window_generator.Done());
+  ASSERT_EQ(cur_wordpiece_indices.size(), 3);
+  for (int i = 0; i < cur_wordpiece_indices.size(); i++) {
+    ASSERT_EQ(cur_wordpiece_indices[i], wordpiece_indices[i + 5]);
+  }
+  ASSERT_EQ(cur_token_starts.size(), 3);
+  ASSERT_EQ(cur_tokens.size(), 3);
+  for (int i = 0; i < cur_tokens.size(); i++) {
+    ASSERT_EQ(cur_token_starts[i], token_starts[i + 3]);
+    ASSERT_EQ(cur_tokens[i], tokens[i + 3]);
+  }
+
+  ASSERT_FALSE(window_generator.Next(&cur_wordpiece_indices, &cur_token_starts,
+                                     &cur_tokens));
+}
+}  // namespace
+}  // namespace libtextclassifier3
diff --git a/native/annotator/test_data/datetime.fb b/native/annotator/test_data/datetime.fb
new file mode 100644
index 0000000..5828b94
--- /dev/null
+++ b/native/annotator/test_data/datetime.fb
Binary files differ
diff --git a/native/annotator/test_data/test_model.fb b/native/annotator/test_data/test_model.fb
index 64b3ac0..2e9418e 100644
--- a/native/annotator/test_data/test_model.fb
+++ b/native/annotator/test_data/test_model.fb
Binary files differ
diff --git a/native/annotator/test_data/test_vocab_model.fb b/native/annotator/test_data/test_vocab_model.fb
index 74b7631..d9d9a94 100644
--- a/native/annotator/test_data/test_vocab_model.fb
+++ b/native/annotator/test_data/test_vocab_model.fb
Binary files differ
diff --git a/native/annotator/vocab/vocab-annotator-dummy.h b/native/annotator/vocab/vocab-annotator-dummy.h
deleted file mode 100644
index eda8a9c..0000000
--- a/native/annotator/vocab/vocab-annotator-dummy.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LIBTEXTCLASSIFIER_ANNOTATOR_VOCAB_VOCAB_ANNOTATOR_DUMMY_H_
-#define LIBTEXTCLASSIFIER_ANNOTATOR_VOCAB_VOCAB_ANNOTATOR_DUMMY_H_
-
-#include <string>
-#include <vector>
-
-#include "annotator/feature-processor.h"
-#include "annotator/model_generated.h"
-#include "annotator/types.h"
-#include "utils/i18n/locale.h"
-#include "utils/utf8/unicodetext.h"
-#include "utils/utf8/unilib.h"
-
-namespace libtextclassifier3 {
-
-class VocabAnnotator {
- public:
-  static std::unique_ptr<VocabAnnotator> Create(
-      const VocabModel *model, const FeatureProcessor &feature_processor,
-      const UniLib &unilib) {
-    return nullptr;
-  }
-
-  bool Annotate(const UnicodeText &context,
-                const std::vector<Locale> detected_text_language_tags,
-                bool trigger_on_beginner_words,
-                std::vector<AnnotatedSpan> *results) const {
-    return true;
-  }
-
-  bool ClassifyText(const UnicodeText &context, CodepointSpan click,
-                    const std::vector<Locale> detected_text_language_tags,
-                    bool trigger_on_beginner_words,
-                    ClassificationResult *result) const {
-    return false;
-  }
-};
-
-}  // namespace libtextclassifier3
-
-#endif  // LIBTEXTCLASSIFIER_ANNOTATOR_VOCAB_VOCAB_ANNOTATOR_DUMMY_H_
diff --git a/native/annotator/vocab/vocab-annotator-impl.cc b/native/annotator/vocab/vocab-annotator-impl.cc
new file mode 100644
index 0000000..4b5cc73
--- /dev/null
+++ b/native/annotator/vocab/vocab-annotator-impl.cc
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "annotator/vocab/vocab-annotator-impl.h"
+
+#include "annotator/feature-processor.h"
+#include "annotator/model_generated.h"
+#include "utils/base/logging.h"
+#include "utils/optional.h"
+#include "utils/strings/numbers.h"
+
+namespace libtextclassifier3 {
+
+VocabAnnotator::VocabAnnotator(
+    std::unique_ptr<VocabLevelTable> vocab_level_table,
+    const std::vector<Locale>& triggering_locales,
+    const FeatureProcessor& feature_processor, const UniLib& unilib,
+    const VocabModel* model)
+    : vocab_level_table_(std::move(vocab_level_table)),
+      triggering_locales_(triggering_locales),
+      feature_processor_(feature_processor),
+      unilib_(unilib),
+      model_(model) {}
+
+std::unique_ptr<VocabAnnotator> VocabAnnotator::Create(
+    const VocabModel* model, const FeatureProcessor& feature_processor,
+    const UniLib& unilib) {
+  std::unique_ptr<VocabLevelTable> vocab_lebel_table =
+      VocabLevelTable::Create(model);
+  if (vocab_lebel_table == nullptr) {
+    TC3_LOG(ERROR) << "Failed to create vocab level table.";
+    return nullptr;
+  }
+  std::vector<Locale> triggering_locales;
+  if (model->triggering_locales() &&
+      !ParseLocales(model->triggering_locales()->c_str(),
+                    &triggering_locales)) {
+    TC3_LOG(ERROR) << "Could not parse model supported locales.";
+    return nullptr;
+  }
+
+  return std::unique_ptr<VocabAnnotator>(
+      new VocabAnnotator(std::move(vocab_lebel_table), triggering_locales,
+                         feature_processor, unilib, model));
+}
+
+bool VocabAnnotator::Annotate(
+    const UnicodeText& context,
+    const std::vector<Locale> detected_text_language_tags,
+    bool trigger_on_beginner_words, std::vector<AnnotatedSpan>* results) const {
+  std::vector<Token> tokens = feature_processor_.Tokenize(context);
+  for (const Token& token : tokens) {
+    ClassificationResult classification_result;
+    CodepointSpan stripped_span;
+    bool found = ClassifyTextInternal(
+        context, {token.start, token.end}, detected_text_language_tags,
+        trigger_on_beginner_words, &classification_result, &stripped_span);
+    if (found) {
+      results->push_back(AnnotatedSpan{stripped_span, {classification_result}});
+    }
+  }
+  return true;
+}
+
+bool VocabAnnotator::ClassifyText(
+    const UnicodeText& context, CodepointSpan click,
+    const std::vector<Locale> detected_text_language_tags,
+    bool trigger_on_beginner_words, ClassificationResult* result) const {
+  CodepointSpan stripped_span;
+  return ClassifyTextInternal(context, click, detected_text_language_tags,
+                              trigger_on_beginner_words, result,
+                              &stripped_span);
+}
+
+bool VocabAnnotator::ClassifyTextInternal(
+    const UnicodeText& context, const CodepointSpan click,
+    const std::vector<Locale> detected_text_language_tags,
+    bool trigger_on_beginner_words, ClassificationResult* classification_result,
+    CodepointSpan* classified_span) const {
+  if (vocab_level_table_ == nullptr) {
+    return false;
+  }
+
+  if (!Locale::IsAnyLocaleSupported(detected_text_language_tags,
+                                    triggering_locales_,
+                                    /*default_value=*/false)) {
+    return false;
+  }
+  const CodepointSpan stripped_span =
+      feature_processor_.StripBoundaryCodepoints(context,
+                                                 {click.first, click.second});
+  const UnicodeText stripped_token = UnicodeText::Substring(
+      context, stripped_span.first, stripped_span.second, /*do_copy=*/false);
+  const std::string lower_token =
+      unilib_.ToLowerText(stripped_token).ToUTF8String();
+
+  const Optional<LookupResult> result = vocab_level_table_->Lookup(lower_token);
+  if (!result.has_value()) {
+    return false;
+  }
+  if (result.value().do_not_trigger_in_upper_case &&
+      unilib_.IsUpper(*stripped_token.begin())) {
+    TC3_VLOG(INFO) << "Not trigger define: proper noun in upper case.";
+    return false;
+  }
+  if (result.value().beginner_level && !trigger_on_beginner_words) {
+    TC3_VLOG(INFO) << "Not trigger define: for beginner only.";
+    return false;
+  }
+  *classification_result =
+      ClassificationResult("dictionary", model_->target_classification_score(),
+                           model_->priority_score());
+  *classified_span = stripped_span;
+
+  return true;
+}
+}  // namespace libtextclassifier3
diff --git a/native/annotator/vocab/vocab-annotator-impl.h b/native/annotator/vocab/vocab-annotator-impl.h
new file mode 100644
index 0000000..1a2194a
--- /dev/null
+++ b/native/annotator/vocab/vocab-annotator-impl.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_ANNOTATOR_VOCAB_VOCAB_ANNOTATOR_IMPL_H_
+#define LIBTEXTCLASSIFIER_ANNOTATOR_VOCAB_VOCAB_ANNOTATOR_IMPL_H_
+
+#include "annotator/feature-processor.h"
+#include "annotator/model_generated.h"
+#include "annotator/types.h"
+#include "annotator/vocab/vocab-level-table.h"
+#include "utils/i18n/locale.h"
+#include "utils/utf8/unicodetext.h"
+#include "utils/utf8/unilib.h"
+
+namespace libtextclassifier3 {
+
+// Annotates vocabs of different levels which users may want to look them up
+// in a dictionary.
+class VocabAnnotator {
+ public:
+  static std::unique_ptr<VocabAnnotator> Create(
+      const VocabModel *model, const FeatureProcessor &feature_processor,
+      const UniLib &unilib);
+
+  bool Annotate(const UnicodeText &context,
+                const std::vector<Locale> detected_text_language_tags,
+                bool trigger_on_beginner_words,
+                std::vector<AnnotatedSpan> *results) const;
+
+  bool ClassifyText(const UnicodeText &context, CodepointSpan click,
+                    const std::vector<Locale> detected_text_language_tags,
+                    bool trigger_on_beginner_words,
+                    ClassificationResult *result) const;
+
+ private:
+  explicit VocabAnnotator(std::unique_ptr<VocabLevelTable> vocab_level_table,
+                          const std::vector<Locale> &triggering_locales,
+                          const FeatureProcessor &feature_processor,
+                          const UniLib &unilib, const VocabModel *model);
+
+  bool ClassifyTextInternal(
+      const UnicodeText &context, const CodepointSpan click,
+      const std::vector<Locale> detected_text_language_tags,
+      bool trigger_on_beginner_words,
+      ClassificationResult *classification_result,
+      CodepointSpan *classified_span) const;
+  bool ShouldTriggerOnBeginnerVocabs() const;
+
+  const std::unique_ptr<VocabLevelTable> vocab_level_table_;
+  // Locales for which this annotator triggers.
+  const std::vector<Locale> triggering_locales_;
+  const FeatureProcessor &feature_processor_;
+  const UniLib &unilib_;
+  const VocabModel *model_;
+};
+
+}  // namespace libtextclassifier3
+
+#endif  // LIBTEXTCLASSIFIER_ANNOTATOR_VOCAB_VOCAB_ANNOTATOR_IMPL_H_
diff --git a/native/annotator/vocab/vocab-level-table.cc b/native/annotator/vocab/vocab-level-table.cc
new file mode 100644
index 0000000..71b3d8f
--- /dev/null
+++ b/native/annotator/vocab/vocab-level-table.cc
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "annotator/vocab/vocab-level-table.h"
+
+#include <cstddef>
+#include <memory>
+
+#include "annotator/model_generated.h"
+#include "utils/base/endian.h"
+#include "utils/container/bit-vector.h"
+#include "utils/optional.h"
+#include "marisa/trie.h"
+
+namespace libtextclassifier3 {
+
+std::unique_ptr<VocabLevelTable> VocabLevelTable::Create(
+    const VocabModel* model) {
+  if (!LittleEndian::IsLittleEndian()) {
+    // TODO(tonymak) Consider making this work on a big endian device.
+    TC3_LOG(ERROR)
+        << "VocabLevelTable is only working on a little endian device.";
+    return nullptr;
+  }
+  const flatbuffers::Vector<uint8_t>* trie_data = model->vocab_trie();
+  if (trie_data == nullptr) {
+    TC3_LOG(ERROR) << "vocab_trie is missing from the model file.";
+    return nullptr;
+  }
+  std::unique_ptr<marisa::Trie> vocab_trie(new marisa::Trie);
+  vocab_trie->map(trie_data->data(), trie_data->size());
+
+  return std::unique_ptr<VocabLevelTable>(new VocabLevelTable(
+      model, std::move(vocab_trie), BitVector(model->beginner_level()),
+      BitVector(model->do_not_trigger_in_upper_case())));
+}
+
+VocabLevelTable::VocabLevelTable(const VocabModel* model,
+                                 std::unique_ptr<marisa::Trie> vocab_trie,
+                                 const BitVector beginner_level,
+                                 const BitVector do_not_trigger_in_upper_case)
+    : model_(model),
+      vocab_trie_(std::move(vocab_trie)),
+      beginner_level_(beginner_level),
+      do_not_trigger_in_upper_case_(do_not_trigger_in_upper_case) {}
+
+Optional<LookupResult> VocabLevelTable::Lookup(const std::string& vocab) const {
+  marisa::Agent agent;
+  agent.set_query(vocab.data(), vocab.size());
+  if (vocab_trie_->lookup(agent)) {
+    const int vector_idx = agent.key().id();
+    return Optional<LookupResult>({beginner_level_[vector_idx],
+                                   do_not_trigger_in_upper_case_[vector_idx]});
+  }
+  return Optional<LookupResult>();
+}
+}  // namespace libtextclassifier3
diff --git a/native/annotator/vocab/vocab-level-table.h b/native/annotator/vocab/vocab-level-table.h
new file mode 100644
index 0000000..f83ad72
--- /dev/null
+++ b/native/annotator/vocab/vocab-level-table.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_ANNOTATOR_VOCAB_VOCAB_LEVEL_TABLE_H_
+#define LIBTEXTCLASSIFIER_ANNOTATOR_VOCAB_VOCAB_LEVEL_TABLE_H_
+
+#include "annotator/model_generated.h"
+#include "annotator/types.h"
+#include "utils/container/bit-vector.h"
+#include "marisa/trie.h"
+
+namespace libtextclassifier3 {
+
+struct LookupResult {
+  // Whether to trigger define for users of beginner proficiency.
+  bool beginner_level;
+  // Whether if we should avoid triggering define if the leading character is in
+  // upper case.
+  bool do_not_trigger_in_upper_case;
+};
+
+// A table of vocabs and their levels which is backed by a marisa trie.
+// See http://www.s-yata.jp/marisa-trie/docs/readme.en.html.
+class VocabLevelTable {
+ public:
+  static std::unique_ptr<VocabLevelTable> Create(const VocabModel* model);
+
+  Optional<LookupResult> Lookup(const std::string& vocab) const;
+
+ private:
+  explicit VocabLevelTable(const VocabModel* model,
+                           std::unique_ptr<marisa::Trie> vocab_trie,
+                           const BitVector beginner_level,
+                           const BitVector do_not_trigger_in_upper_case);
+  static const VocabModel* LoadAndVerifyModel();
+
+  const VocabModel* model_;
+  const std::unique_ptr<marisa::Trie> vocab_trie_;
+  const BitVector beginner_level_;
+  const BitVector do_not_trigger_in_upper_case_;
+};
+
+}  // namespace libtextclassifier3
+
+#endif  // LIBTEXTCLASSIFIER_ANNOTATOR_VOCAB_VOCAB_LEVEL_TABLE_H_
diff --git a/native/tensorflow_models/seq_flow_lite/tflite_ops/layer_norm.cc b/native/tensorflow_models/seq_flow_lite/tflite_ops/layer_norm.cc
new file mode 100644
index 0000000..e28b04d
--- /dev/null
+++ b/native/tensorflow_models/seq_flow_lite/tflite_ops/layer_norm.cc
@@ -0,0 +1,347 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#include "tensorflow_models/seq_flow_lite/tflite_ops/layer_norm.h"
+
+#include <unordered_set>
+#include <vector>
+
+#include "tensorflow_models/seq_flow_lite/tflite_ops/quantization_util.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+
+namespace seq_flow_lite {
+namespace ops {
+namespace custom {
+
+namespace {
+
+const int kInputIndex = 0;
+const int kScaleIndex = 1;
+const int kOffsetIndex = 2;
+const int kAxisIndex = 3;
+const int kOutputIndex = 0;
+
+TfLiteStatus Resize(TfLiteContext* context, TfLiteNode* node) {
+  if (node->outputs->size != 1) {
+    return kTfLiteError;
+  }
+
+  TfLiteTensor* input = &context->tensors[node->inputs->data[kInputIndex]];
+  TfLiteTensor* scale = &context->tensors[node->inputs->data[kScaleIndex]];
+  TfLiteTensor* offset = &context->tensors[node->inputs->data[kOffsetIndex]];
+  TF_LITE_ENSURE_EQ(context, input->type, kTfLiteUInt8);
+  TF_LITE_ENSURE_EQ(context, offset->dims->data[0], 1);
+  TF_LITE_ENSURE_EQ(context, offset->dims->size, 1);
+  TF_LITE_ENSURE_EQ(context, offset->type, kTfLiteUInt8);
+  TF_LITE_ENSURE_EQ(context, scale->dims->data[0], 1);
+  TF_LITE_ENSURE_EQ(context, scale->dims->size, 1);
+  TF_LITE_ENSURE_EQ(context, scale->type, kTfLiteUInt8);
+  if (node->inputs->size == 4) {
+    TfLiteTensor* axis = &context->tensors[node->inputs->data[kAxisIndex]];
+    TF_LITE_ENSURE_EQ(context, axis->type, kTfLiteInt32);
+  }
+
+  TfLiteTensor* output = &context->tensors[node->outputs->data[kOutputIndex]];
+  TF_LITE_ENSURE_EQ(context, output->type, kTfLiteUInt8);
+  return context->ResizeTensor(context, output,
+                               TfLiteIntArrayCopy(input->dims));
+}
+
+int GetNumberOfSteps(const TfLiteTensor* input) {
+  int number_of_steps = 1;
+  for (int i = 0; i < input->dims->size; ++i) {
+    number_of_steps *= input->dims->data[i];
+  }
+  return number_of_steps;
+}
+
+inline int GetNumberOfFeatures(const TfLiteTensor* input, const int* axis,
+                               const int num_axis) {
+  int num_features = 1;
+  for (int i = 0; i < num_axis; ++i) {
+    num_features *= input->dims->data[axis[i]];
+  }
+  return num_features;
+}
+
+// Performs sanity checks on input axis and resolves into valid dimensions.
+inline bool ResolveAxis(const int num_dims, const int* axis, const int num_axis,
+                        int* out_axis, int* out_num_axis) {
+  *out_num_axis = 0;
+  // Short-circuit axis resolution for scalars; the axis will go unused.
+  if (num_dims == 0) {
+    return true;
+  }
+
+  // Using an unordered set to reduce complexity in looking up duplicates.
+  std::unordered_set<int> unique_indices;
+  for (int64_t idx = 0; idx < num_axis; ++idx) {
+    // Handle negative index.
+    int current = axis[idx] < 0 ? (axis[idx] + num_dims) : axis[idx];
+    assert(current >= 0 && current < num_dims);
+    // Only adding the axis if it wasn't added before.
+    if (unique_indices.find(current) == unique_indices.end()) {
+      unique_indices.insert(current);
+      out_axis[*out_num_axis] = current;
+      *out_num_axis += 1;
+    }
+  }
+  return true;
+}
+
+// Given current position in the input array, the api computes the next valid
+// index.
+bool ValidIndex(const int* input_dims, const int input_dims_size,
+                int* curr_pos) {
+  if (input_dims_size == 0) {
+    return false;
+  }
+  assert(input_dims != nullptr);
+  assert(curr_pos != nullptr);
+  for (int idx = input_dims_size - 1; idx >= 0; --idx) {
+    int current_val = curr_pos[idx] + 1;
+    assert(input_dims[idx] >= current_val);
+    if (input_dims[idx] == current_val) {
+      curr_pos[idx] = 0;
+    } else {
+      curr_pos[idx] = current_val;
+      return true;
+    }
+  }
+  return false;
+}
+
+// Gets next offset depending on reduction axis. Implementation borrowed from
+// tflite reduce mean implementation.
+int GetOffset(const int* input_dims, const int input_dims_size,
+              const int* curr_pos, const int* axis, const int axis_size) {
+  if (input_dims_size == 0) return 0;
+  assert(input_dims != nullptr);
+  assert(curr_pos != nullptr);
+  int offset = 0;
+  for (int idx = 0; idx < input_dims_size; ++idx) {
+    // if idx is part of reduction axes, we skip offset calculation.
+    bool is_axis = false;
+    if (axis != nullptr) {
+      for (int redux = 0; redux < axis_size; ++redux) {
+        if (idx == axis[redux]) {
+          is_axis = true;
+          break;
+        }
+      }
+    }
+    if (!is_axis) offset = offset * input_dims[idx] + curr_pos[idx];
+  }
+
+  return offset;
+}
+
+// TODO(b/132896827): Current implementation needs further evaluation to reduce
+// space time complexities.
+// Layer norm for a uint8 tensor over an arbitrary set of already-resolved
+// reduction axes. Makes two full passes over the tensor: the first
+// accumulates per-group sum and sum-of-squares (grouped by the offset with
+// the reduction axes removed), the second applies the resulting affine
+// transform (multiplier/bias) and re-quantizes into the output tensor.
+// NOTE(review): assumes GetNumberOfSteps(input) is an exact multiple of
+// num_features -- confirm Resize() guarantees this.
+TfLiteStatus FlexibleLayerNorm(const TfLiteTensor* input, const float scale,
+                               const float offset, const int* axis,
+                               const int num_axis, TfLiteTensor* output) {
+  int num_features = GetNumberOfFeatures(input, &axis[0], num_axis);
+  int time_steps = static_cast<int>(GetNumberOfSteps(input) / num_features);
+
+  // Per-group accumulators, indexed by the reduced ("stats") offset.
+  std::vector<float> sum_x(time_steps, 0.0f);
+  std::vector<float> sum_xx(time_steps, 0.0f);
+  // Multidimensional cursor over the input; advanced by ValidIndex.
+  std::vector<int> index_iter(input->dims->size, 0);
+
+  // Computing sum and squared sum for features across the reduction axes.
+  do {
+    // Not passing reduction axes to get the input offset as we are simply
+    // iterating through the multidimensional array.
+    int input_offset = GetOffset(input->dims->data, input->dims->size,
+                                 &index_iter[0], nullptr, 0);
+    // Passing in the valid reduction axes as we would like to get the output
+    // offset after reduction.
+    int stats_offset = GetOffset(input->dims->data, input->dims->size,
+                                 &index_iter[0], &axis[0], num_axis);
+    float input_val = PodDequantize(*input, input_offset);
+    sum_x[stats_offset] += input_val;
+    sum_xx[stats_offset] += input_val * input_val;
+  } while (ValidIndex(input->dims->data, input->dims->size, &index_iter[0]));
+
+  std::vector<float> multiplier(time_steps, 1.0f);
+  std::vector<float> bias(time_steps, 0.0f);
+
+  // Computing stats for the reduction axes.
+  for (int i = 0; i < time_steps; ++i) {
+    sum_x[i] = sum_x[i] / num_features;
+    sum_xx[i] = sum_xx[i] / num_features;
+    const float variance = sum_xx[i] - sum_x[i] * sum_x[i];
+    // 1e-6 guards against division by zero when a group is constant.
+    const float inverse_stddev = 1 / sqrt(variance + 1e-6);
+    multiplier[i] = inverse_stddev * scale;
+    bias[i] = offset - sum_x[i] * inverse_stddev * scale;
+  }
+
+  const float out_inverse_scale = 1.0f / output->params.scale;
+  const int32_t out_zero_point = output->params.zero_point;
+  uint8_t* out_ptr = output->data.uint8;
+  // Reset the cursor for the second (output) pass.
+  std::fill(index_iter.begin(), index_iter.end(), 0);
+
+  // Using the stats to fill the output pointer.
+  do {
+    // Not passing reduction axes to get the input offset as we are simply
+    // iterating through the multidimensional array.
+    int input_offset = GetOffset(input->dims->data, input->dims->size,
+                                 &index_iter[0], nullptr, 0);
+    // Passing in the valid reduction axes as we would like to get the output
+    // offset after reduction.
+    int stats_offset = GetOffset(input->dims->data, input->dims->size,
+                                 &index_iter[0], &axis[0], num_axis);
+    float input_val = PodDequantize(*input, input_offset);
+
+    const float value =
+        input_val * multiplier[stats_offset] + bias[stats_offset];
+    out_ptr[input_offset] =
+        PodQuantize(value, out_zero_point, out_inverse_scale);
+  } while (ValidIndex(input->dims->data, input->dims->size, &index_iter[0]));
+
+  return kTfLiteOk;
+}
+
+// Float32 layer norm over the last dimension: each time step's feature
+// vector is normalized to zero mean / unit variance (with a small epsilon),
+// then scaled and offset.
+TfLiteStatus DefaultLayerNormFloat(const TfLiteTensor* input, const float scale,
+                                   const float offset, TfLiteTensor* output) {
+  const int rank = input->dims->size;
+  const int num_features = input->dims->data[rank - 1];
+  const int num_steps = static_cast<int>(GetNumberOfSteps(input) / num_features);
+  const float* in_ptr = input->data.f;
+  float* out_ptr = output->data.f;
+  for (int step = 0; step < num_steps; ++step) {
+    const int base = step * num_features;
+    // First pass: accumulate the mean and mean-of-squares of the features.
+    float sum_x = 0;
+    float sum_xx = 0;
+    for (int j = 0; j < num_features; ++j) {
+      const float v = in_ptr[base + j];
+      sum_x += v;
+      sum_xx += v * v;
+    }
+    const float exp_xx = sum_xx / num_features;
+    const float exp_x = sum_x / num_features;
+    const float variance = exp_xx - exp_x * exp_x;
+    // 1e-6 guards against division by zero for constant feature vectors.
+    const float inverse_stddev = 1 / sqrt(variance + 1e-6);
+    const float multiplier = inverse_stddev * scale;
+    const float bias = offset - exp_x * inverse_stddev * scale;
+    // Second pass: apply the affine normalization in place.
+    for (int j = 0; j < num_features; ++j) {
+      out_ptr[base + j] = in_ptr[base + j] * multiplier + bias;
+    }
+  }
+  return kTfLiteOk;
+}
+
+// Quantized (uint8) layer norm over the last dimension. Each time step's
+// features are dequantized into a scratch buffer, normalized, and then
+// re-quantized with the output tensor's quantization parameters.
+TfLiteStatus DefaultLayerNorm(const TfLiteTensor* input, const float scale,
+                              const float offset, TfLiteTensor* output) {
+  const int rank = input->dims->size;
+  const int num_features = input->dims->data[rank - 1];
+  const int num_steps = static_cast<int>(GetNumberOfSteps(input) / num_features);
+
+  // Scratch buffer reused across time steps to avoid re-dequantizing.
+  std::vector<float> dequantized(num_features, 0.0f);
+  const float out_inverse_scale = 1.0f / output->params.scale;
+  const int32_t out_zero_point = output->params.zero_point;
+  uint8_t* out_ptr = output->data.uint8;
+  for (int step = 0; step < num_steps; ++step) {
+    const int base = step * num_features;
+    // First pass: dequantize and accumulate mean / mean-of-squares.
+    float sum_x = 0;
+    float sum_xx = 0;
+    for (int j = 0; j < num_features; ++j) {
+      const float v = PodDequantize(*input, base + j);
+      dequantized[j] = v;
+      sum_x += v;
+      sum_xx += v * v;
+    }
+    const float exp_xx = sum_xx / num_features;
+    const float exp_x = sum_x / num_features;
+    const float variance = exp_xx - exp_x * exp_x;
+    // 1e-6 guards against division by zero for constant feature vectors.
+    const float inverse_stddev = 1 / sqrt(variance + 1e-6);
+    const float multiplier = inverse_stddev * scale;
+    const float bias = offset - exp_x * inverse_stddev * scale;
+    // Second pass: apply the affine transform and re-quantize.
+    for (int j = 0; j < num_features; ++j) {
+      const float value = dequantized[j] * multiplier + bias;
+      out_ptr[base + j] = PodQuantize(value, out_zero_point, out_inverse_scale);
+    }
+  }
+  return kTfLiteOk;
+}
+
+// Kernel entry point: reads the scale/offset inputs (dequantizing them when
+// the input tensor is quantized) and dispatches to the fast last-axis
+// implementations when the single reduction axis is the trailing dimension,
+// falling back to FlexibleLayerNorm for arbitrary axes.
+TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+  const TfLiteTensor* input =
+      &context->tensors[node->inputs->data[kInputIndex]];
+  TfLiteTensor* output = &context->tensors[node->outputs->data[kOutputIndex]];
+  // Reference the tensors in place instead of copying the TfLiteTensor
+  // structs by value.
+  const TfLiteTensor& scale_tensor =
+      context->tensors[node->inputs->data[kScaleIndex]];
+  const TfLiteTensor& offset_tensor =
+      context->tensors[node->inputs->data[kOffsetIndex]];
+  float scale = 1.0;
+  float offset = 0.0;
+  if (input->type == kTfLiteUInt8) {
+    scale = PodDequantize(scale_tensor, 0);
+    offset = PodDequantize(offset_tensor, 0);
+  } else {
+    scale = scale_tensor.data.f[0];
+    offset = offset_tensor.data.f[0];
+  }
+
+  TfLiteTensor* axis = &context->tensors[node->inputs->data[kAxisIndex]];
+  int num_axis = static_cast<int>(tflite::NumElements(axis));
+  // For backward compatibility reasons, we handle the default layer norm for
+  // last channel as below.
+  if (num_axis == 1 && (axis->data.i32[0] == -1 ||
+                        axis->data.i32[0] == (input->dims->size - 1))) {
+    if (input->type == kTfLiteUInt8) {
+      return DefaultLayerNorm(input, scale, offset, output);
+    } else if (input->type == kTfLiteFloat32) {
+      return DefaultLayerNormFloat(input, scale, offset, output);
+    } else {
+      // Typo fixed ("eith" -> "either"); the macro returns kTfLiteError.
+      TF_LITE_ENSURE_MSG(context, false,
+                         "Input should be either Uint8 or Float32.");
+    }
+  }
+
+  std::vector<int> resolved_axis(num_axis);
+  // Resolve axis, e.g. map negative indices to their positive equivalents.
+  int num_resolved_axis = 0;
+  if (!ResolveAxis(input->dims->size, axis->data.i32, num_axis,
+                   &resolved_axis[0], &num_resolved_axis)) {
+    return kTfLiteError;
+  }
+
+  return FlexibleLayerNorm(input, scale, offset, &resolved_axis[0],
+                           num_resolved_axis, output);
+}
+
+}  // namespace
+
+// Returns the registration for the custom LAYER_NORM op. No init/free
+// hooks are needed; only Resize and Eval are provided. The registration is
+// a function-local static shared by all interpreters.
+TfLiteRegistration* Register_LAYER_NORM() {
+  static TfLiteRegistration r = {nullptr, nullptr, Resize, Eval};
+  return &r;
+}
+
+}  // namespace custom
+}  // namespace ops
+}  // namespace seq_flow_lite
diff --git a/native/tensorflow_models/seq_flow_lite/tflite_ops/layer_norm.h b/native/tensorflow_models/seq_flow_lite/tflite_ops/layer_norm.h
new file mode 100644
index 0000000..6d84ca4
--- /dev/null
+++ b/native/tensorflow_models/seq_flow_lite/tflite_ops/layer_norm.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef LEARNING_EXPANDER_POD_DEEP_POD_TFLITE_HANDLERS_LAYER_NORM_H_
+#define LEARNING_EXPANDER_POD_DEEP_POD_TFLITE_HANDLERS_LAYER_NORM_H_
+
+#include "tensorflow/lite/kernels/register.h"
+
+namespace seq_flow_lite {
+namespace ops {
+namespace custom {
+
+TfLiteRegistration* Register_LAYER_NORM();
+
+}  // namespace custom
+}  // namespace ops
+}  // namespace seq_flow_lite
+
+#endif  // LEARNING_EXPANDER_POD_DEEP_POD_TFLITE_HANDLERS_LAYER_NORM_H_
diff --git a/native/tensorflow_models/seq_flow_lite/tflite_ops/quantization_util.h b/native/tensorflow_models/seq_flow_lite/tflite_ops/quantization_util.h
new file mode 100644
index 0000000..7f2db41
--- /dev/null
+++ b/native/tensorflow_models/seq_flow_lite/tflite_ops/quantization_util.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef TENSORFLOW_MODELS_SEQUENCE_PROJECTION_TFLITE_OPS_QUANTIZATION_UTIL_H_
+#define TENSORFLOW_MODELS_SEQUENCE_PROJECTION_TFLITE_OPS_QUANTIZATION_UTIL_H_
+
+#include <algorithm>
+#include <cmath>
+
+#include "tensorflow/lite/context.h"
+
+namespace seq_flow_lite {
+
+// Returns the original (dequantized) float value of an 8-bit quantized
+// value, using the quantization parameters (zero point and scale) carried by
+// `tensor`.
+inline float PodDequantizeValue(const TfLiteTensor& tensor, uint8_t value) {
+  const int32_t zero_point = tensor.params.zero_point;
+  const float scale = tensor.params.scale;
+  return (static_cast<int32_t>(value) - zero_point) * scale;
+}
+
+// Returns the original (dequantized) value of the 'index'-th element of
+// 'tensor'.
+inline float PodDequantize(const TfLiteTensor& tensor, int index) {
+  return PodDequantizeValue(tensor, tensor.data.uint8[index]);
+}
+
+// Quantizes 'value' to 8bit, given the quantization bias (zero_point) and
+// factor (inverse_scale). Rounds half away from zero, then clamps the result
+// to the valid uint8 range [0, 255].
+inline uint8_t PodQuantize(float value, int32_t zero_point,
+                           float inverse_scale) {
+  const float integer_value_in_float = value * inverse_scale;
+  // +0.5 for non-negative values, -0.5 otherwise: round half away from zero.
+  const float offset = (integer_value_in_float >= 0.0) ? 0.5f : -0.5f;
+  // NOTE(sfeuz): This assumes value * inverse_scale is within [INT_MIN,
+  // INT_MAX].
+  int32_t integer_value =
+      static_cast<int32_t>(integer_value_in_float + offset) + zero_point;
+  return static_cast<uint8_t>(std::max(std::min(255, integer_value), 0));
+}
+
+}  // namespace seq_flow_lite
+
+#endif  // TENSORFLOW_MODELS_SEQUENCE_PROJECTION_TFLITE_OPS_QUANTIZATION_UTIL_H_
diff --git a/native/utils/bert_tokenizer.cc b/native/utils/bert_tokenizer.cc
new file mode 100644
index 0000000..bf9341f
--- /dev/null
+++ b/native/utils/bert_tokenizer.cc
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "utils/bert_tokenizer.h"
+
+#include <string>
+
+#include "annotator/types.h"
+#include "utils/tokenizer-utils.h"
+#include "utils/utf8/unicodetext.h"
+#include "utils/utf8/unilib.h"
+#include "absl/strings/string_view.h"
+
+namespace libtextclassifier3 {
+
+// Copies the vocabulary and builds the word -> id index. Indexing is done
+// over the stored copy so the string_view keys in index_map_ reference
+// storage owned by vocab_.
+FlatHashMapBackedWordpiece::FlatHashMapBackedWordpiece(
+    const std::vector<std::string>& vocab)
+    : vocab_{vocab} {
+  int index = 0;
+  for (const std::string& word : vocab_) {
+    index_map_[word] = index++;
+  }
+}
+
+// Reports vocabulary membership via *value. The returned LookupStatus is
+// always the default (success) for this in-memory implementation.
+LookupStatus FlatHashMapBackedWordpiece::Contains(absl::string_view key,
+                                                  bool* value) const {
+  *value = index_map_.contains(key);
+  return LookupStatus();
+}
+
+// Looks up the vocabulary id for `key`. Returns false on a miss, in which
+// case *result is left untouched.
+bool FlatHashMapBackedWordpiece::LookupId(const absl::string_view key,
+                                          int* result) const {
+  const auto entry = index_map_.find(key);
+  if (entry == index_map_.end()) {
+    return false;
+  }
+  *result = entry->second;
+  return true;
+}
+
+// Reverse lookup: maps a vocabulary id back to its word. Returns false for
+// out-of-range ids, leaving *result untouched.
+bool FlatHashMapBackedWordpiece::LookupWord(int vocab_id,
+                                            absl::string_view* result) const {
+  if (vocab_id < 0 || vocab_id >= vocab_.size()) {
+    return false;
+  }
+  *result = vocab_[vocab_id];
+  return true;
+}
+
+// Tokenizes a single pre-split token into word pieces by delegating to the
+// vector overload.
+TokenizerResult BertTokenizer::TokenizeSingleToken(const std::string& token) {
+  return BertTokenizer::Tokenize(std::vector<std::string>({token}));
+}
+
+// Pre-tokenizes the raw input on whitespace/punctuation/CJK boundaries, then
+// wordpiece-tokenizes the resulting tokens.
+TokenizerResult BertTokenizer::Tokenize(const std::string& input) {
+  return BertTokenizer::Tokenize(PreTokenize(input));
+}
+
+// Wordpiece-tokenizes a vector of pre-split tokens. Subwords and their
+// absolute begin/end offsets are appended per input token; on the first
+// lookup failure the partially-filled result is returned as-is.
+// NOTE(review): `num_word_pieces` is computed per token but never appended
+// to result.row_lengths, which therefore stays empty -- confirm downstream
+// consumers do not rely on row_lengths.
+TokenizerResult BertTokenizer::Tokenize(
+    const std::vector<std::string>& tokens) {
+  WordpieceTokenizerResult result;
+  std::vector<std::string>& subwords = result.subwords;
+  std::vector<int>& wp_absolute_begin_offset = result.wp_begin_offset;
+  std::vector<int>& wp_absolute_end_offset = result.wp_end_offset;
+
+  for (int token_index = 0; token_index < tokens.size(); token_index++) {
+    auto& token = tokens[token_index];
+    int num_word_pieces = 0;
+    LookupStatus status = WordpieceTokenize(
+        token, options_.max_bytes_per_token, options_.max_chars_per_subtoken,
+        options_.suffix_indicator, options_.use_unknown_token,
+        options_.unknown_token, options_.split_unknown_chars, &vocab_,
+        &subwords, &wp_absolute_begin_offset, &wp_absolute_end_offset,
+        &num_word_pieces);
+
+    if (!status.success) {
+      return std::move(result);
+    }
+  }
+
+  return std::move(result);
+}
+
+// This replicates how the original bert_tokenizer from the tflite-support
+// library pretokenize text by using regex_split with these default regexes.
+// It splits the text on spaces, punctuations and chinese characters and
+// output all the tokens except spaces.
+// So far, the only difference between this and the original implementation
+// we are aware of is that the original regexes has 8 ranges of chinese
+// unicodes. We have all these 8 ranges plus two extra ranges.
+std::vector<std::string> BertTokenizer::PreTokenize(
+    const absl::string_view input) {
+  const std::vector<Token> tokens =
+      TokenizeOnWhiteSpacePunctuationAndChineseLetter(input);
+  std::vector<std::string> token_texts;
+  token_texts.reserve(tokens.size());
+  // The previous implementation used std::transform with std::move applied
+  // to a const reference -- which silently copies -- and relied on
+  // <algorithm> without including it. A plain copy loop is equivalent and
+  // self-contained.
+  for (const Token& token : tokens) {
+    token_texts.push_back(token.value);
+  }
+
+  return token_texts;
+}
+
+}  // namespace libtextclassifier3
diff --git a/native/utils/bert_tokenizer.h b/native/utils/bert_tokenizer.h
new file mode 100644
index 0000000..eb5f978
--- /dev/null
+++ b/native/utils/bert_tokenizer.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_UTILS_BERT_TOKENIZER_H_
+#define LIBTEXTCLASSIFIER_UTILS_BERT_TOKENIZER_H_
+
+#include <fstream>
+#include <string>
+#include <vector>
+
+#include "utils/wordpiece_tokenizer.h"
+#include "absl/container/flat_hash_map.h"
+#include "tensorflow_lite_support/cc/text/tokenizers/tokenizer.h"
+#include "tensorflow_lite_support/cc/utils/common_utils.h"
+
+namespace libtextclassifier3 {
+
+using ::tflite::support::text::tokenizer::TokenizerResult;
+using ::tflite::support::utils::LoadVocabFromBuffer;
+using ::tflite::support::utils::LoadVocabFromFile;
+
+constexpr int kDefaultMaxBytesPerToken = 100;
+constexpr int kDefaultMaxCharsPerSubToken = 100;
+constexpr char kDefaultSuffixIndicator[] = "##";
+constexpr bool kDefaultUseUnknownToken = true;
+constexpr char kDefaultUnknownToken[] = "[UNK]";
+constexpr bool kDefaultSplitUnknownChars = false;
+
+// Result of wordpiece tokenization including subwords and offsets.
+// Example:
+// input:                tokenize     me  please
+// subwords:             token ##ize  me  plea ##se
+// wp_begin_offset:     [0,      5,   9,  12,    16]
+// wp_end_offset:       [     5,    8,  11,   16,  18]
+// row_lengths:         [2,          1,  1]
+// NOTE(review): BertTokenizer::Tokenize returns this by value as the base
+// TokenizerResult, so the extra fields below are sliced away at that call
+// boundary -- confirm whether callers need them.
+struct WordpieceTokenizerResult
+    : tflite::support::text::tokenizer::TokenizerResult {
+  std::vector<int> wp_begin_offset;
+  std::vector<int> wp_end_offset;
+  std::vector<int> row_lengths;
+};
+
+// Options to create a BertTokenizer. These are forwarded verbatim to
+// WordpieceTokenize (see bert_tokenizer.cc).
+struct BertTokenizerOptions {
+  int max_bytes_per_token = kDefaultMaxBytesPerToken;
+  int max_chars_per_subtoken = kDefaultMaxCharsPerSubToken;
+  // Prefix marking a non-initial word piece, e.g. "##ize".
+  std::string suffix_indicator = kDefaultSuffixIndicator;
+  bool use_unknown_token = kDefaultUseUnknownToken;
+  // Token emitted when a word cannot be segmented ("[UNK]" by default).
+  std::string unknown_token = kDefaultUnknownToken;
+  bool split_unknown_chars = kDefaultSplitUnknownChars;
+};
+
+// A flat-hash-map based implementation of WordpieceVocab, used in
+// BertTokenizer to invoke tensorflow::text::WordpieceTokenize within.
+// NOTE(review): index_map_ keys are string_views into the strings owned by
+// vocab_; a copied instance's map keys would still point into the source's
+// storage. Confirm instances are never copied after construction.
+class FlatHashMapBackedWordpiece : public WordpieceVocab {
+ public:
+  explicit FlatHashMapBackedWordpiece(const std::vector<std::string>& vocab);
+
+  // Always returns a success status; *value reports membership.
+  LookupStatus Contains(absl::string_view key, bool* value) const override;
+  // Returns false on a miss; *result is left untouched.
+  bool LookupId(absl::string_view key, int* result) const;
+  // Returns false for out-of-range ids; *result is left untouched.
+  bool LookupWord(int vocab_id, absl::string_view* result) const;
+  int VocabularySize() const { return vocab_.size(); }
+
+ private:
+  // All words indexed position in vocabulary file.
+  std::vector<std::string> vocab_;
+  absl::flat_hash_map<absl::string_view, int> index_map_;
+};
+
+// Wordpiece tokenizer for bert models. Initialized with a vocab file or vector.
+class BertTokenizer : public tflite::support::text::tokenizer::Tokenizer {
+ public:
+  // Initialize the tokenizer from vocab vector and tokenizer configs.
+  explicit BertTokenizer(const std::vector<std::string>& vocab,
+                         const BertTokenizerOptions& options = {})
+      : vocab_{FlatHashMapBackedWordpiece(vocab)}, options_{options} {}
+
+  // Initialize the tokenizer from file path to vocab and tokenizer configs.
+  explicit BertTokenizer(const std::string& path_to_vocab,
+                         const BertTokenizerOptions& options = {})
+      : BertTokenizer(LoadVocabFromFile(path_to_vocab), options) {}
+
+  // Initialize the tokenizer from buffer and size of vocab and tokenizer
+  // configs.
+  BertTokenizer(const char* vocab_buffer_data, size_t vocab_buffer_size,
+                const BertTokenizerOptions& options = {})
+      : BertTokenizer(LoadVocabFromBuffer(vocab_buffer_data, vocab_buffer_size),
+                      options) {}
+
+  // Perform tokenization, first tokenize the input and then find the subwords.
+  // return tokenized results containing the subwords.
+  TokenizerResult Tokenize(const std::string& input) override;
+
+  // Perform tokenization on a single token, return tokenized results containing
+  // the subwords.
+  TokenizerResult TokenizeSingleToken(const std::string& token);
+
+  // Perform tokenization, return tokenized results containing the subwords.
+  // NOTE(review): this overload is not part of the base Tokenizer interface;
+  // together with the override above it hides nothing but the by-value
+  // TokenizerResult return slices off WordpieceTokenizerResult's offsets.
+  TokenizerResult Tokenize(const std::vector<std::string>& tokens);
+
+  // Check if a certain key is included in the vocab.
+  LookupStatus Contains(const absl::string_view key, bool* value) const {
+    return vocab_.Contains(key, value);
+  }
+
+  // Find the id of a wordpiece.
+  bool LookupId(absl::string_view key, int* result) const override {
+    return vocab_.LookupId(key, result);
+  }
+
+  // Find the wordpiece from an id.
+  bool LookupWord(int vocab_id, absl::string_view* result) const override {
+    return vocab_.LookupWord(vocab_id, result);
+  }
+
+  int VocabularySize() const { return vocab_.VocabularySize(); }
+
+  // Splits raw text on whitespace, punctuation and CJK characters; static so
+  // it can be used without constructing a tokenizer.
+  static std::vector<std::string> PreTokenize(const absl::string_view input);
+
+ private:
+  FlatHashMapBackedWordpiece vocab_;
+  BertTokenizerOptions options_;
+};
+
+}  // namespace libtextclassifier3
+
+#endif  // LIBTEXTCLASSIFIER_UTILS_BERT_TOKENIZER_H_
diff --git a/native/utils/bert_tokenizer_test.cc b/native/utils/bert_tokenizer_test.cc
new file mode 100644
index 0000000..3c4e52c
--- /dev/null
+++ b/native/utils/bert_tokenizer_test.cc
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "utils/bert_tokenizer.h"
+
+#include "utils/test-data-test-utils.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+
+namespace libtextclassifier3 {
+
+using ::testing::ElementsAre;
+
+namespace {
+constexpr char kTestVocabPath[] = "annotator/pod_ner/test_data/vocab.txt";
+
+// Shared assertion: tokenizing "i'm question" must yield exactly these four
+// subwords regardless of how the tokenizer was constructed.
+void AssertTokenizerResults(std::unique_ptr<BertTokenizer> tokenizer) {
+  auto results = tokenizer->Tokenize("i'm question");
+
+  EXPECT_THAT(results.subwords, ElementsAre("i", "'", "m", "question"));
+}
+
+TEST(BertTokenizerTest, TestTokenizerCreationFromBuffer) {
+  // Load the vocab file into memory and construct from the raw buffer.
+  std::string buffer = GetTestFileContent(kTestVocabPath);
+
+  auto tokenizer =
+      absl::make_unique<BertTokenizer>(buffer.data(), buffer.size());
+
+  AssertTokenizerResults(std::move(tokenizer));
+}
+
+TEST(BertTokenizerTest, TestTokenizerCreationFromFile) {
+  // Construct directly from the vocab file path.
+  auto tokenizer =
+      absl::make_unique<BertTokenizer>(GetTestDataPath(kTestVocabPath));
+
+  AssertTokenizerResults(std::move(tokenizer));
+}
+
+TEST(BertTokenizerTest, TestTokenizerCreationFromVector) {
+  // Construct from an in-memory vocabulary.
+  const std::vector<std::string> vocab = {"i", "'", "m", "question"};
+  auto tokenizer = absl::make_unique<BertTokenizer>(vocab);
+
+  AssertTokenizerResults(std::move(tokenizer));
+}
+
+TEST(BertTokenizerTest, TestTokenizerMultipleRows) {
+  auto tokenizer =
+      absl::make_unique<BertTokenizer>(GetTestDataPath(kTestVocabPath));
+
+  auto results = tokenizer->Tokenize("i'm questionansweraskask");
+
+  // Continuation pieces carry the "##" suffix indicator.
+  EXPECT_THAT(results.subwords, ElementsAre("i", "'", "m", "question", "##ans",
+                                            "##wer", "##ask", "##ask"));
+}
+
+TEST(BertTokenizerTest, TestTokenizerUnknownTokens) {
+  // "questionansweraskask" cannot be fully segmented with this tiny vocab,
+  // so the whole token collapses to the unknown token.
+  const std::vector<std::string> vocab = {"i", "'", "m", "question"};
+  auto tokenizer = absl::make_unique<BertTokenizer>(vocab);
+
+  auto results = tokenizer->Tokenize("i'm questionansweraskask");
+
+  EXPECT_THAT(results.subwords,
+              ElementsAre("i", "'", "m", kDefaultUnknownToken));
+}
+
+TEST(BertTokenizerTest, TestLookupId) {
+  // Ids are assigned by position in the vocabulary vector.
+  const std::vector<std::string> vocab = {"i", "'", "m", "question"};
+  auto tokenizer = absl::make_unique<BertTokenizer>(vocab);
+
+  int id;
+  ASSERT_FALSE(tokenizer->LookupId("iDontExist", &id));
+
+  ASSERT_TRUE(tokenizer->LookupId("i", &id));
+  ASSERT_EQ(id, 0);
+  ASSERT_TRUE(tokenizer->LookupId("'", &id));
+  ASSERT_EQ(id, 1);
+  ASSERT_TRUE(tokenizer->LookupId("m", &id));
+  ASSERT_EQ(id, 2);
+  ASSERT_TRUE(tokenizer->LookupId("question", &id));
+  ASSERT_EQ(id, 3);
+}
+
+TEST(BertTokenizerTest, TestLookupWord) {
+  // Reverse lookup: id -> word; out-of-range ids must fail.
+  const std::vector<std::string> vocab = {"i", "'", "m", "question"};
+  auto tokenizer = absl::make_unique<BertTokenizer>(vocab);
+
+  absl::string_view word;
+  ASSERT_FALSE(tokenizer->LookupWord(6, &word));
+
+  ASSERT_TRUE(tokenizer->LookupWord(0, &word));
+  ASSERT_EQ(word, "i");
+  ASSERT_TRUE(tokenizer->LookupWord(1, &word));
+  ASSERT_EQ(word, "'");
+  ASSERT_TRUE(tokenizer->LookupWord(2, &word));
+  ASSERT_EQ(word, "m");
+  ASSERT_TRUE(tokenizer->LookupWord(3, &word));
+  ASSERT_EQ(word, "question");
+}
+
+TEST(BertTokenizerTest, TestContains) {
+  const std::vector<std::string> vocab = {"i", "'", "m", "question"};
+  auto tokenizer = absl::make_unique<BertTokenizer>(vocab);
+
+  bool contained;
+  tokenizer->Contains("iDontExist", &contained);
+  ASSERT_FALSE(contained);
+
+  // Every vocabulary entry must be reported as contained.
+  for (const std::string& word : vocab) {
+    tokenizer->Contains(word, &contained);
+    ASSERT_TRUE(contained);
+  }
+}
+
+// Renamed from TestLVocabularySize: the stray 'L' was a typo in the test name.
+TEST(BertTokenizerTest, TestVocabularySize) {
+  std::vector<std::string> vocab;
+  vocab.emplace_back("i");
+  vocab.emplace_back("'");
+  vocab.emplace_back("m");
+  vocab.emplace_back("question");
+  auto tokenizer = absl::make_unique<BertTokenizer>(vocab);
+
+  ASSERT_EQ(tokenizer->VocabularySize(), 4);
+}
+
+TEST(BertTokenizerTest, SimpleEnglishWithPunctuation) {
+  absl::string_view input = "I am fine, thanks!";
+
+  // PreTokenize splits on whitespace and punctuation, dropping the spaces
+  // but keeping punctuation as separate tokens.
+  std::vector<std::string> tokens = BertTokenizer::PreTokenize(input);
+
+  EXPECT_THAT(tokens, testing::ElementsAreArray(
+                          {"I", "am", "fine", ",", "thanks", "!"}));
+}
+}  // namespace
+}  // namespace libtextclassifier3
diff --git a/native/utils/codepoint-range.fbs b/native/utils/codepoint-range.fbs
old mode 100755
new mode 100644
diff --git a/native/utils/container/bit-vector.fbs b/native/utils/container/bit-vector.fbs
old mode 100755
new mode 100644
diff --git a/native/utils/flatbuffers/flatbuffers.fbs b/native/utils/flatbuffers/flatbuffers.fbs
old mode 100755
new mode 100644
diff --git a/native/utils/grammar/analyzer.cc b/native/utils/grammar/analyzer.cc
index fcba217..b760442 100644
--- a/native/utils/grammar/analyzer.cc
+++ b/native/utils/grammar/analyzer.cc
@@ -46,15 +46,24 @@
                               : nullptr) {}
 
 StatusOr<std::vector<EvaluatedDerivation>> Analyzer::Parse(
-    const TextContext& input, UnsafeArena* arena) const {
+    const TextContext& input, UnsafeArena* arena,
+    bool deduplicate_derivations) const {
   std::vector<EvaluatedDerivation> result;
 
+  std::vector<Derivation> derivations = parser_.Parse(input, arena);
+  if (deduplicate_derivations) {
+    derivations = DeduplicateDerivations<Derivation>(derivations);
+  }
   // Evaluate each derivation.
-  for (const Derivation& derivation :
-       ValidDeduplicatedDerivations(parser_.Parse(input, arena))) {
-    TC3_ASSIGN_OR_RETURN(const SemanticValue* value,
-                         semantic_evaluator_.Eval(input, derivation, arena));
-    result.emplace_back(EvaluatedDerivation{std::move(derivation), value});
+  for (const Derivation& derivation : derivations) {
+    if (derivation.IsValid()) {
+      TC3_ASSIGN_OR_RETURN(const SemanticValue* value,
+                           semantic_evaluator_.Eval(input, derivation, arena));
+      result.emplace_back(
+          EvaluatedDerivation{{/*parse_tree=*/derivation.parse_tree,
+                               /*rule_id=*/derivation.rule_id},
+                              /*semantic_value=*/value});
+    }
   }
 
   return result;
@@ -62,8 +71,9 @@
 
 StatusOr<std::vector<EvaluatedDerivation>> Analyzer::Parse(
     const UnicodeText& text, const std::vector<Locale>& locales,
-    UnsafeArena* arena) const {
-  return Parse(BuildTextContextForInput(text, locales), arena);
+    UnsafeArena* arena, bool deduplicate_derivations) const {
+  return Parse(BuildTextContextForInput(text, locales), arena,
+               deduplicate_derivations);
 }
 
 TextContext Analyzer::BuildTextContextForInput(
diff --git a/native/utils/grammar/analyzer.h b/native/utils/grammar/analyzer.h
index c83c622..6d1dd46 100644
--- a/native/utils/grammar/analyzer.h
+++ b/native/utils/grammar/analyzer.h
@@ -38,11 +38,13 @@
                     const Tokenizer* tokenizer);
 
   // Parses and evaluates an input.
-  StatusOr<std::vector<EvaluatedDerivation>> Parse(const TextContext& input,
-                                                   UnsafeArena* arena) const;
+  StatusOr<std::vector<EvaluatedDerivation>> Parse(
+      const TextContext& input, UnsafeArena* arena,
+      bool deduplicate_derivations = true) const;
+
   StatusOr<std::vector<EvaluatedDerivation>> Parse(
       const UnicodeText& text, const std::vector<Locale>& locales,
-      UnsafeArena* arena) const;
+      UnsafeArena* arena, bool deduplicate_derivations = true) const;
 
   // Pre-processes an input text for parsing.
   TextContext BuildTextContextForInput(
diff --git a/native/utils/grammar/analyzer_test.cc b/native/utils/grammar/analyzer_test.cc
index 4950fb4..3905b70 100644
--- a/native/utils/grammar/analyzer_test.cc
+++ b/native/utils/grammar/analyzer_test.cc
@@ -70,9 +70,8 @@
     EXPECT_THAT(results, SizeIs(1));
 
     // Check parse tree.
-    EXPECT_THAT(
-        results[0].derivation,
-        IsDerivation(kMonth /* rule_id */, 13 /* begin */, 20 /* end */));
+    EXPECT_THAT(results[0], IsDerivation(kMonth /* rule_id */, 13 /* begin */,
+                                         20 /* end */));
 
     // Check semantic result.
     EXPECT_EQ(results[0].value->Value<int32>(), 1);
@@ -88,7 +87,7 @@
     EXPECT_THAT(results, SizeIs(1));
 
     // Check parse tree.
-    EXPECT_THAT(results[0].derivation,
+    EXPECT_THAT(results[0],
                 IsDerivation(kMonth /* rule_id */, 0 /* begin */, 8 /* end */));
 
     // Check semantic result.
diff --git a/native/utils/grammar/evaluated-derivation.h b/native/utils/grammar/evaluated-derivation.h
index bac252a..4ae409d 100644
--- a/native/utils/grammar/evaluated-derivation.h
+++ b/native/utils/grammar/evaluated-derivation.h
@@ -23,8 +23,7 @@
 namespace libtextclassifier3::grammar {
 
 // A parse tree for a root rule and its semantic value.
-struct EvaluatedDerivation {
-  Derivation derivation;
+struct EvaluatedDerivation : public Derivation {
   const SemanticValue* value;
 };
 
diff --git a/native/utils/grammar/parsing/derivation.cc b/native/utils/grammar/parsing/derivation.cc
index 6618654..4298be5 100644
--- a/native/utils/grammar/parsing/derivation.cc
+++ b/native/utils/grammar/parsing/derivation.cc
@@ -17,6 +17,7 @@
 #include "utils/grammar/parsing/derivation.h"
 
 #include <algorithm>
+#include <vector>
 
 namespace libtextclassifier3::grammar {
 
@@ -37,59 +38,11 @@
   return result;
 }
 
-std::vector<Derivation> DeduplicateDerivations(
-    const std::vector<Derivation>& derivations) {
-  std::vector<Derivation> sorted_candidates = derivations;
-  std::stable_sort(sorted_candidates.begin(), sorted_candidates.end(),
-                   [](const Derivation& a, const Derivation& b) {
-                     // Sort by id.
-                     if (a.rule_id != b.rule_id) {
-                       return a.rule_id < b.rule_id;
-                     }
-
-                     // Sort by increasing start.
-                     if (a.parse_tree->codepoint_span.first !=
-                         b.parse_tree->codepoint_span.first) {
-                       return a.parse_tree->codepoint_span.first <
-                              b.parse_tree->codepoint_span.first;
-                     }
-
-                     // Sort by decreasing end.
-                     return a.parse_tree->codepoint_span.second >
-                            b.parse_tree->codepoint_span.second;
-                   });
-
-  // Deduplicate by overlap.
-  std::vector<Derivation> result;
-  for (int i = 0; i < sorted_candidates.size(); i++) {
-    const Derivation& candidate = sorted_candidates[i];
-    bool eliminated = false;
-
-    // Due to the sorting above, the candidate can only be completely
-    // intersected by a match before it in the sorted order.
-    for (int j = i - 1; j >= 0; j--) {
-      if (sorted_candidates[j].rule_id != candidate.rule_id) {
-        break;
-      }
-      if (sorted_candidates[j].parse_tree->codepoint_span.first <=
-              candidate.parse_tree->codepoint_span.first &&
-          sorted_candidates[j].parse_tree->codepoint_span.second >=
-              candidate.parse_tree->codepoint_span.second) {
-        eliminated = true;
-        break;
-      }
-    }
-    if (!eliminated) {
-      result.push_back(candidate);
-    }
-  }
-  return result;
-}
-
 std::vector<Derivation> ValidDeduplicatedDerivations(
     const std::vector<Derivation>& derivations) {
   std::vector<Derivation> result;
-  for (const Derivation& derivation : DeduplicateDerivations(derivations)) {
+  for (const Derivation& derivation :
+       DeduplicateDerivations<Derivation>(derivations)) {
     // Check that asserts are fulfilled.
     if (derivation.IsValid()) {
       result.push_back(derivation);
diff --git a/native/utils/grammar/parsing/derivation.h b/native/utils/grammar/parsing/derivation.h
index 70e169d..2196495 100644
--- a/native/utils/grammar/parsing/derivation.h
+++ b/native/utils/grammar/parsing/derivation.h
@@ -30,6 +30,8 @@
 
   // Checks that all assertions are fulfilled.
   bool IsValid() const;
+  int64 GetRuleId() const { return rule_id; }
+  const ParseTree* GetParseTree() const { return parse_tree; }
 };
 
 // Deduplicates rule derivations by containing overlap.
@@ -38,8 +40,58 @@
 // will get two rule derivations when the suffix is present: one with and one
 // without the suffix. We therefore deduplicate by containing overlap, viz. from
 // two candidates we keep the longer one if it completely contains the shorter.
-std::vector<Derivation> DeduplicateDerivations(
-    const std::vector<Derivation>& derivations);
+// This factory function works with any type T that extends Derivation.
+template <typename T, typename std::enable_if<std::is_base_of<
+                          Derivation, T>::value>::type* = nullptr>
+// std::vector<T> DeduplicateDerivations(const std::vector<T>& derivations);
+std::vector<T> DeduplicateDerivations(const std::vector<T>& derivations) {
+  std::vector<T> sorted_candidates = derivations;
+
+  std::stable_sort(sorted_candidates.begin(), sorted_candidates.end(),
+                   [](const T& a, const T& b) {
+                     // Sort by id.
+                     if (a.GetRuleId() != b.GetRuleId()) {
+                       return a.GetRuleId() < b.GetRuleId();
+                     }
+
+                     // Sort by increasing start.
+                     if (a.GetParseTree()->codepoint_span.first !=
+                         b.GetParseTree()->codepoint_span.first) {
+                       return a.GetParseTree()->codepoint_span.first <
+                              b.GetParseTree()->codepoint_span.first;
+                     }
+
+                     // Sort by decreasing end.
+                     return a.GetParseTree()->codepoint_span.second >
+                            b.GetParseTree()->codepoint_span.second;
+                   });
+
+  // Deduplicate by overlap.
+  std::vector<T> result;
+  for (int i = 0; i < sorted_candidates.size(); i++) {
+    const T& candidate = sorted_candidates[i];
+    bool eliminated = false;
+
+    // Due to the sorting above, the candidate can only be completely
+    // intersected by a match before it in the sorted order.
+    for (int j = i - 1; j >= 0; j--) {
+      if (sorted_candidates[j].rule_id != candidate.rule_id) {
+        break;
+      }
+      if (sorted_candidates[j].parse_tree->codepoint_span.first <=
+              candidate.parse_tree->codepoint_span.first &&
+          sorted_candidates[j].parse_tree->codepoint_span.second >=
+              candidate.parse_tree->codepoint_span.second) {
+        eliminated = true;
+        break;
+      }
+    }
+    if (!eliminated) {
+      result.push_back(candidate);
+    }
+  }
+  return result;
+}
 
 // Deduplicates and validates rule derivations.
 std::vector<Derivation> ValidDeduplicatedDerivations(
diff --git a/native/utils/grammar/rules.fbs b/native/utils/grammar/rules.fbs
old mode 100755
new mode 100644
diff --git a/native/utils/grammar/semantics/expression.fbs b/native/utils/grammar/semantics/expression.fbs
old mode 100755
new mode 100644
diff --git a/native/utils/grammar/testing/value.fbs b/native/utils/grammar/testing/value.fbs
old mode 100755
new mode 100644
diff --git a/native/utils/grammar/utils/rules.cc b/native/utils/grammar/utils/rules.cc
index 661514a..1b545a6 100644
--- a/native/utils/grammar/utils/rules.cc
+++ b/native/utils/grammar/utils/rules.cc
@@ -264,7 +264,7 @@
 }
 
 std::vector<Rules::RhsElement> Rules::ResolveFillers(
-    const std::vector<RhsElement>& rhs) {
+    const std::vector<RhsElement>& rhs, int shard) {
   std::vector<RhsElement> result;
   for (int i = 0; i < rhs.size();) {
     if (i == rhs.size() - 1 || IsNonterminalOfName(rhs[i], kFiller) ||
@@ -284,15 +284,27 @@
                            /*is_optional=*/false);
     if (rhs[i + 1].is_optional) {
       // <a_with_tokens> ::= <a>
-      Add(with_tokens_nonterminal, {rhs[i]});
+      Add(with_tokens_nonterminal, {rhs[i]},
+          /*callback=*/kNoCallback,
+          /*callback_param=*/0,
+          /*max_whitespace_gap=*/-1,
+          /*case_sensitive=*/false, shard);
     } else {
       // <a_with_tokens> ::= <a> <token>
-      Add(with_tokens_nonterminal, {rhs[i], token});
+      Add(with_tokens_nonterminal, {rhs[i], token},
+          /*callback=*/kNoCallback,
+          /*callback_param=*/0,
+          /*max_whitespace_gap=*/-1,
+          /*case_sensitive=*/false, shard);
     }
     // <a_with_tokens> ::= <a_with_tokens> <token>
     const RhsElement with_tokens(with_tokens_nonterminal,
                                  /*is_optional=*/false);
-    Add(with_tokens_nonterminal, {with_tokens, token});
+    Add(with_tokens_nonterminal, {with_tokens, token},
+        /*callback=*/kNoCallback,
+        /*callback_param=*/0,
+        /*max_whitespace_gap=*/-1,
+        /*case_sensitive=*/false, shard);
     result.push_back(with_tokens);
     i += 2;
   }
@@ -300,8 +312,8 @@
 }
 
 std::vector<Rules::RhsElement> Rules::OptimizeRhs(
-    const std::vector<RhsElement>& rhs) {
-  return ResolveFillers(ResolveAnchors(rhs));
+    const std::vector<RhsElement>& rhs, int shard) {
+  return ResolveFillers(ResolveAnchors(rhs), shard);
 }
 
 void Rules::Add(const int lhs, const std::vector<RhsElement>& rhs,
diff --git a/native/utils/grammar/utils/rules.h b/native/utils/grammar/utils/rules.h
index 4931e2f..c8b2a70 100644
--- a/native/utils/grammar/utils/rules.h
+++ b/native/utils/grammar/utils/rules.h
@@ -188,7 +188,8 @@
       std::vector<bool>* omit_these);
 
   // Applies optimizations to the right hand side of a rule.
-  std::vector<RhsElement> OptimizeRhs(const std::vector<RhsElement>& rhs);
+  std::vector<RhsElement> OptimizeRhs(const std::vector<RhsElement>& rhs,
+                                      int shard = 0);
 
   // Removes start and end anchors in case they are followed (respectively
   // preceded) by unbounded filler.
@@ -206,7 +207,8 @@
   // `<a_with_tokens> ::= <a>`
   // `<a_with_tokens> ::= <a_with_tokens> <token>`
   // In this each occurrence of `<a>` can start a sequence of tokens.
-  std::vector<RhsElement> ResolveFillers(const std::vector<RhsElement>& rhs);
+  std::vector<RhsElement> ResolveFillers(const std::vector<RhsElement>& rhs,
+                                         int shard = 0);
 
   // Checks whether an element denotes a specific nonterminal.
   bool IsNonterminalOfName(const RhsElement& element,
diff --git a/native/utils/hash/cityhash.cc b/native/utils/hash/cityhash.cc
new file mode 100644
index 0000000..e2a8596
--- /dev/null
+++ b/native/utils/hash/cityhash.cc
@@ -0,0 +1,188 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "utils/hash/cityhash.h"
+
+#include <cstdint>
+
+#include "absl/base/internal/endian.h"
+#include "absl/numeric/int128.h"
+
+namespace tc3farmhash {
+namespace {
+// Some primes between 2^63 and 2^64 for various uses.
+static const uint64_t k0 = 0xa5b85c5e198ed849ULL;
+static const uint64_t k1 = 0x8d58ac26afe12e47ULL;
+static const uint64_t k2 = 0xc47b6e9e3a970ed3ULL;
+static const uint64_t k3 = 0xc70f6907e782aa0bULL;
+
+// Hash 128 input bits down to 64 bits of output.
+// This is intended to be a reasonably good hash function.
+// It may change from time to time.
+inline uint64_t Hash128to64(const absl::uint128 x) {
+  // Murmur-inspired hashing.
+  const uint64_t kMul = 0xc6a4a7935bd1e995ULL;
+  uint64_t a = (absl::Uint128Low64(x) ^ absl::Uint128High64(x)) * kMul;
+  a ^= (a >> 47);
+  uint64_t b = (absl::Uint128High64(x) ^ a) * kMul;
+  b ^= (b >> 47);
+  b *= kMul;
+  return b;
+}
+
+uint64_t HashLen16(uint64_t u, uint64_t v) {
+  return Hash128to64(absl::MakeUint128(u, v));
+}
+
+static uint64_t Rotate(uint64_t val, size_t shift) {
+  assert(shift <= 63);
+  return (val >> shift) | (val << (-shift & 63));
+}
+
+static uint64_t ShiftMix(uint64_t val) { return val ^ (val >> 47); }
+
+uint64_t HashLen0to16(const char *s, size_t len) {
+  assert(len <= 16);
+  if (len > 8) {
+    uint64_t a = absl::little_endian::Load64(s);
+    uint64_t b = absl::little_endian::Load64(s + len - 8);
+    return HashLen16(a, Rotate(b + len, len)) ^ b;
+  }
+  if (len >= 4) {
+    uint64_t a = absl::little_endian::Load32(s);
+    return HashLen16(len + (a << 3), absl::little_endian::Load32(s + len - 4));
+  }
+  if (len > 0) {
+    uint8_t a = s[0];
+    uint8_t b = s[len >> 1];
+    uint8_t c = s[len - 1];
+    uint32_t y = static_cast<uint32_t>(a) + (static_cast<uint32_t>(b) << 8);
+    uint32_t z = len + (static_cast<uint32_t>(c) << 2);
+    return ShiftMix(y * k2 ^ z * k3) * k2;
+  }
+  return k2;
+}
+
+// Return a 16-byte hash for 48 bytes.  Quick and dirty.
+// Callers do best to use "random-looking" values for a and b.
+// (For more, see the code review discussion of CL 18799087.)
+std::pair<uint64_t, uint64_t> WeakHashLen32WithSeeds(uint64_t w, uint64_t x,
+                                                     uint64_t y, uint64_t z,
+                                                     uint64_t a, uint64_t b) {
+  a += w;
+  b = Rotate(b + a + z, 51);
+  uint64_t c = a;
+  a += x;
+  a += y;
+  b += Rotate(a, 23);
+  return std::make_pair(a + z, b + c);
+}
+
+// Return a 16-byte hash for s[0] ... s[31], a, and b.  Quick and dirty.
+std::pair<uint64_t, uint64_t> WeakHashLen32WithSeeds(const char *s, uint64_t a,
+                                                     uint64_t b) {
+  return WeakHashLen32WithSeeds(absl::little_endian::Load64(s),
+                                absl::little_endian::Load64(s + 8),
+                                absl::little_endian::Load64(s + 16),
+                                absl::little_endian::Load64(s + 24), a, b);
+}
+
+}  // namespace
+
+// This probably works well for 16-byte strings as well, but it may be overkill
+// in that case.
+static uint64_t HashLen17to32(const char *s, size_t len) {
+  assert(len >= 17);
+  assert(len <= 32);
+  uint64_t a = absl::little_endian::Load64(s) * k1;
+  uint64_t b = absl::little_endian::Load64(s + 8);
+  uint64_t c = absl::little_endian::Load64(s + len - 8) * k2;
+  uint64_t d = absl::little_endian::Load64(s + len - 16) * k0;
+  return HashLen16(Rotate(a - b, 43) + Rotate(c, 30) + d,
+                   a + Rotate(b ^ k3, 20) - c + len);
+}
+
+// Return an 8-byte hash for 33 to 64 bytes.
+static uint64_t HashLen33to64(const char *s, size_t len) {
+  uint64_t z = absl::little_endian::Load64(s + 24);
+  uint64_t a = absl::little_endian::Load64(s) +
+               (len + absl::little_endian::Load64(s + len - 16)) * k0;
+  uint64_t b = Rotate(a + z, 52);
+  uint64_t c = Rotate(a, 37);
+  a += absl::little_endian::Load64(s + 8);
+  c += Rotate(a, 7);
+  a += absl::little_endian::Load64(s + 16);
+  uint64_t vf = a + z;
+  uint64_t vs = b + Rotate(a, 31) + c;
+  a = absl::little_endian::Load64(s + 16) +
+      absl::little_endian::Load64(s + len - 32);
+  z += absl::little_endian::Load64(s + len - 8);
+  b = Rotate(a + z, 52);
+  c = Rotate(a, 37);
+  a += absl::little_endian::Load64(s + len - 24);
+  c += Rotate(a, 7);
+  a += absl::little_endian::Load64(s + len - 16);
+  uint64_t wf = a + z;
+  uint64_t ws = b + Rotate(a, 31) + c;
+  uint64_t r = ShiftMix((vf + ws) * k2 + (wf + vs) * k0);
+  return ShiftMix(r * k0 + vs) * k2;
+}
+
+uint64_t CityHash64(const char *s, size_t len) {
+  if (len <= 32) {
+    if (len <= 16) {
+      return HashLen0to16(s, len);
+    } else {
+      return HashLen17to32(s, len);
+    }
+  } else if (len <= 64) {
+    return HashLen33to64(s, len);
+  }
+
+  // For strings over 64 bytes we hash the end first, and then as we
+  // loop we keep 56 bytes of state: v, w, x, y, and z.
+  uint64_t x = absl::little_endian::Load64(s + len - 40);
+  uint64_t y = absl::little_endian::Load64(s + len - 16) +
+               absl::little_endian::Load64(s + len - 56);
+  uint64_t z = HashLen16(absl::little_endian::Load64(s + len - 48) + len,
+                         absl::little_endian::Load64(s + len - 24));
+  std::pair<uint64_t, uint64_t> v =
+      WeakHashLen32WithSeeds(s + len - 64, len, z);
+  std::pair<uint64_t, uint64_t> w =
+      WeakHashLen32WithSeeds(s + len - 32, y + k1, x);
+  x = x * k1 + absl::little_endian::Load64(s);
+
+  // Decrease len to the nearest multiple of 64, and operate on 64-byte chunks.
+  len = (len - 1) & ~static_cast<size_t>(63);
+  assert(len > 0);
+  assert(len == len / 64 * 64);
+  do {
+    x = Rotate(x + y + v.first + absl::little_endian::Load64(s + 8), 37) * k1;
+    y = Rotate(y + v.second + absl::little_endian::Load64(s + 48), 42) * k1;
+    x ^= w.second;
+    y += v.first + absl::little_endian::Load64(s + 40);
+    z = Rotate(z + w.first, 33) * k1;
+    v = WeakHashLen32WithSeeds(s, v.second * k1, x + w.first);
+    w = WeakHashLen32WithSeeds(s + 32, z + w.second,
+                               y + absl::little_endian::Load64(s + 16));
+    std::swap(z, x);
+    s += 64;
+    len -= 64;
+  } while (len != 0);
+  return HashLen16(HashLen16(v.first, w.first) + ShiftMix(y) * k1 + z,
+                   HashLen16(v.second, w.second) + x);
+}
+}  // namespace tc3farmhash
diff --git a/java/src/com/android/textclassifier/IModelDownloaderCallback.aidl b/native/utils/hash/cityhash.h
similarity index 67%
rename from java/src/com/android/textclassifier/IModelDownloaderCallback.aidl
rename to native/utils/hash/cityhash.h
index 7f9d7fb..9ede3d6 100644
--- a/java/src/com/android/textclassifier/IModelDownloaderCallback.aidl
+++ b/native/utils/hash/cityhash.h
@@ -14,15 +14,14 @@
  * limitations under the License.
  */
 
-package com.android.textclassifier;
+#ifndef LIBTEXTCLASSIFIER_UTILS_HASH_CITYHASH_H_
+#define LIBTEXTCLASSIFIER_UTILS_HASH_CITYHASH_H_
 
-/**
- * Callback for download requests from ModelDownloaderImpl to
- * ModelDownloaderService.
- */
-oneway interface IModelDownloaderCallback {
+#include <cstddef>
+#include <cstdint>
 
-  void onSuccess(long bytesWritten);
+namespace tc3farmhash {
+uint64_t CityHash64(const char *s, size_t len);
+}  // namespace tc3farmhash
 
-  void onFailure(String error);
-}
\ No newline at end of file
+#endif  // LIBTEXTCLASSIFIER_UTILS_HASH_CITYHASH_H_
diff --git a/native/utils/i18n/language-tag.fbs b/native/utils/i18n/language-tag.fbs
old mode 100755
new mode 100644
diff --git a/native/utils/intents/intent-config.fbs b/native/utils/intents/intent-config.fbs
old mode 100755
new mode 100644
diff --git a/native/utils/intents/intent-generator.h b/native/utils/intents/intent-generator.h
index a3c8898..c5cbb1d 100644
--- a/native/utils/intents/intent-generator.h
+++ b/native/utils/intents/intent-generator.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 
 #ifndef LIBTEXTCLASSIFIER_UTILS_INTENTS_INTENT_GENERATOR_H_
 #define LIBTEXTCLASSIFIER_UTILS_INTENTS_INTENT_GENERATOR_H_
diff --git a/native/utils/normalization.fbs b/native/utils/normalization.fbs
old mode 100755
new mode 100644
diff --git a/native/utils/resources.fbs b/native/utils/resources.fbs
old mode 100755
new mode 100644
diff --git a/native/utils/tflite-model-executor.cc b/native/utils/tflite-model-executor.cc
index e491130..36db3e9 100644
--- a/native/utils/tflite-model-executor.cc
+++ b/native/utils/tflite-model-executor.cc
@@ -18,6 +18,7 @@
 
 #include "utils/base/logging.h"
 #include "tensorflow/lite/kernels/register.h"
+#include "tensorflow/lite/schema/schema_generated.h"
 
 // Forward declaration of custom TensorFlow Lite ops for registration.
 namespace tflite {
@@ -33,15 +34,21 @@
 TfLiteRegistration* Register_MUL();
 TfLiteRegistration* Register_RESHAPE();
 TfLiteRegistration* Register_REDUCE_MAX();
+TfLiteRegistration* Register_REDUCE_MIN();
 TfLiteRegistration* Register_REDUCE_ANY();
 TfLiteRegistration* Register_SOFTMAX();
 TfLiteRegistration* Register_GATHER();
+TfLiteRegistration* Register_GATHER_ND();
+TfLiteRegistration* Register_IF();
+TfLiteRegistration* Register_ROUND();
+TfLiteRegistration* Register_ZEROS_LIKE();
 TfLiteRegistration* Register_TRANSPOSE();
 TfLiteRegistration* Register_SUB();
 TfLiteRegistration* Register_DIV();
 TfLiteRegistration* Register_STRIDED_SLICE();
 TfLiteRegistration* Register_EXP();
 TfLiteRegistration* Register_TOPK_V2();
+TfLiteRegistration* Register_SLICE();
 TfLiteRegistration* Register_SPLIT();
 TfLiteRegistration* Register_CAST();
 TfLiteRegistration* Register_MAXIMUM();
@@ -49,6 +56,7 @@
 TfLiteRegistration* Register_NEG();
 TfLiteRegistration* Register_SLICE();
 TfLiteRegistration* Register_LOG();
+TfLiteRegistration* Register_LOGISTIC();
 TfLiteRegistration* Register_SUM();
 TfLiteRegistration* Register_PACK();
 TfLiteRegistration* Register_DEQUANTIZE();
@@ -62,7 +70,7 @@
 TfLiteRegistration* Register_ONE_HOT();
 TfLiteRegistration* Register_POW();
 TfLiteRegistration* Register_TANH();
-#ifndef TC3_AOSP
+TfLiteRegistration* Register_UNIQUE();
 TfLiteRegistration* Register_REDUCE_PROD();
 TfLiteRegistration* Register_SHAPE();
 TfLiteRegistration* Register_NOT_EQUAL();
@@ -70,26 +78,26 @@
 TfLiteRegistration* Register_EXPAND_DIMS();
 TfLiteRegistration* Register_FILL();
 TfLiteRegistration* Register_PADV2();
-#endif  // TC3_AOSP
 }  // namespace builtin
 }  // namespace ops
 }  // namespace tflite
 
 #ifdef TC3_WITH_ACTIONS_OPS
+#include "utils/tflite/blacklist.h"
 #include "utils/tflite/dist_diversification.h"
+#include "utils/tflite/string_projection.h"
 #include "utils/tflite/text_encoder.h"
 #include "utils/tflite/token_encoder.h"
-#ifndef TC3_AOSP
 namespace tflite {
 namespace ops {
 namespace custom {
 TfLiteRegistration* Register_SENTENCEPIECE_TOKENIZER();
 TfLiteRegistration* Register_RAGGED_TENSOR_TO_TENSOR();
 TfLiteRegistration* Register_RAGGED_RANGE();
+TfLiteRegistration* Register_RANDOM_UNIFORM();
 }  // namespace custom
 }  // namespace ops
 }  // namespace tflite
-#endif  // TC3_AOSP
 
 void RegisterSelectedOps(tflite::MutableOpResolver* resolver) {
   resolver->AddBuiltin(tflite::BuiltinOperator_ADD,
@@ -123,6 +131,8 @@
                        tflite::ops::builtin::Register_RESHAPE());
   resolver->AddBuiltin(::tflite::BuiltinOperator_REDUCE_MAX,
                        ::tflite::ops::builtin::Register_REDUCE_MAX());
+  resolver->AddBuiltin(::tflite::BuiltinOperator_REDUCE_MIN,
+                       ::tflite::ops::builtin::Register_REDUCE_MIN());
   resolver->AddBuiltin(::tflite::BuiltinOperator_REDUCE_ANY,
                        ::tflite::ops::builtin::Register_REDUCE_ANY());
   resolver->AddBuiltin(tflite::BuiltinOperator_SOFTMAX,
@@ -133,6 +143,15 @@
                        tflite::ops::builtin::Register_GATHER(),
                        /*min_version=*/1,
                        /*max_version=*/2);
+  resolver->AddBuiltin(::tflite::BuiltinOperator_GATHER_ND,
+                       ::tflite::ops::builtin::Register_GATHER_ND(),
+                       /*version=*/2);
+  resolver->AddBuiltin(::tflite::BuiltinOperator_IF,
+                       ::tflite::ops::builtin::Register_IF()),
+      resolver->AddBuiltin(::tflite::BuiltinOperator_ROUND,
+                           ::tflite::ops::builtin::Register_ROUND());
+  resolver->AddBuiltin(::tflite::BuiltinOperator_ZEROS_LIKE,
+                       ::tflite::ops::builtin::Register_ZEROS_LIKE());
   resolver->AddBuiltin(tflite::BuiltinOperator_TRANSPOSE,
                        tflite::ops::builtin::Register_TRANSPOSE(),
                        /*min_version=*/1,
@@ -153,6 +172,10 @@
                        tflite::ops::builtin::Register_TOPK_V2(),
                        /*min_version=*/1,
                        /*max_version=*/2);
+  resolver->AddBuiltin(tflite::BuiltinOperator_SLICE,
+                       tflite::ops::builtin::Register_SLICE(),
+                       /*min_version=*/1,
+                       /*max_version=*/3);
   resolver->AddBuiltin(tflite::BuiltinOperator_SPLIT,
                        tflite::ops::builtin::Register_SPLIT(),
                        /*min_version=*/1,
@@ -175,6 +198,8 @@
                        /*max_version=*/2);
   resolver->AddBuiltin(tflite::BuiltinOperator_LOG,
                        tflite::ops::builtin::Register_LOG());
+  resolver->AddBuiltin(tflite::BuiltinOperator_LOGISTIC,
+                       tflite::ops::builtin::Register_LOGISTIC());
   resolver->AddBuiltin(tflite::BuiltinOperator_SUM,
                        tflite::ops::builtin::Register_SUM());
   resolver->AddBuiltin(tflite::BuiltinOperator_PACK,
@@ -211,7 +236,8 @@
                        tflite::ops::builtin::Register_TANH(),
                        /*min_version=*/1,
                        /*max_version=*/1);
-#ifndef TC3_AOSP
+  resolver->AddBuiltin(::tflite::BuiltinOperator_UNIQUE,
+                       ::tflite::ops::builtin::Register_UNIQUE());
   resolver->AddBuiltin(::tflite::BuiltinOperator_REDUCE_PROD,
                        ::tflite::ops::builtin::Register_REDUCE_PROD());
   resolver->AddBuiltin(::tflite::BuiltinOperator_SHAPE,
@@ -226,7 +252,6 @@
                        ::tflite::ops::builtin::Register_FILL());
   resolver->AddBuiltin(::tflite::BuiltinOperator_PADV2,
                        ::tflite::ops::builtin::Register_PADV2());
-#endif  // TC3_AOSP
 }
 #else
 void RegisterSelectedOps(tflite::MutableOpResolver* resolver) {
@@ -258,7 +283,6 @@
                       tflite::ops::custom::Register_TEXT_ENCODER());
   resolver->AddCustom("TokenEncoder",
                       tflite::ops::custom::Register_TOKEN_ENCODER());
-#ifndef TC3_AOSP
   resolver->AddCustom(
       "TFSentencepieceTokenizeOp",
       ::tflite::ops::custom::Register_SENTENCEPIECE_TOKENIZER());
@@ -267,7 +291,14 @@
   resolver->AddCustom(
       "RaggedTensorToTensor",
       ::tflite::ops::custom::Register_RAGGED_TENSOR_TO_TENSOR());
-#endif  // TC3_AOSP
+  resolver->AddCustom(
+      "STRING_PROJECTION",
+      ::tflite::ops::custom::libtextclassifier3::Register_STRING_PROJECTION());
+  resolver->AddCustom(
+      "BLACKLIST",
+      ::tflite::ops::custom::libtextclassifier3::Register_BLACKLIST());
+  resolver->AddCustom("RandomUniform",
+                      ::tflite::ops::custom::Register_RANDOM_UNIFORM());
 #endif  // TC3_WITH_ACTIONS_OPS
   customize_fn(resolver.get());
   return std::unique_ptr<tflite::OpResolver>(std::move(resolver));
diff --git a/native/utils/tflite/blacklist.cc b/native/utils/tflite/blacklist.cc
new file mode 100644
index 0000000..b41fba1
--- /dev/null
+++ b/native/utils/tflite/blacklist.cc
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "utils/tflite/blacklist.h"
+
+#include "utils/tflite/blacklist_base.h"
+#include "utils/tflite/skipgram_finder.h"
+#include "flatbuffers/flexbuffers.h"
+
+namespace tflite {
+namespace ops {
+namespace custom {
+
+namespace libtextclassifier3 {
+namespace blacklist {
+
+// Generates prediction vectors for input strings using a skipgram blacklist.
+// This uses the framework in `blacklist_base.h`, with the implementation detail
+// that the input is a string tensor of messages and the terms are skipgrams.
+class BlacklistOp : public BlacklistOpBase {
+ public:
+  explicit BlacklistOp(const flexbuffers::Map& custom_options)
+      : BlacklistOpBase(custom_options),
+        skipgram_finder_(custom_options["max_skip_size"].AsInt32()),
+        input_(nullptr) {
+    auto blacklist = custom_options["blacklist"].AsTypedVector();
+    auto blacklist_category =
+        custom_options["blacklist_category"].AsTypedVector();
+    for (int i = 0; i < blacklist.size(); i++) {
+      int category = blacklist_category[i].AsInt32();
+      flexbuffers::String s = blacklist[i].AsString();
+      skipgram_finder_.AddSkipgram(std::string(s.c_str(), s.length()),
+                                   category);
+    }
+  }
+
+  TfLiteStatus InitializeInput(TfLiteContext* context,
+                               TfLiteNode* node) override {
+    input_ = &context->tensors[node->inputs->data[kInputMessage]];
+    return kTfLiteOk;
+  }
+
+  absl::flat_hash_set<int> GetCategories(int i) const override {
+    StringRef input = GetString(input_, i);
+    return skipgram_finder_.FindSkipgrams(std::string(input.str, input.len));
+  }
+
+  void FinalizeInput() override { input_ = nullptr; }
+
+  TfLiteIntArray* GetInputShape(TfLiteContext* context,
+                                TfLiteNode* node) override {
+    return context->tensors[node->inputs->data[kInputMessage]].dims;
+  }
+
+ private:
+  ::libtextclassifier3::SkipgramFinder skipgram_finder_;
+  TfLiteTensor* input_;
+
+  static constexpr int kInputMessage = 0;
+};
+
+void* BlacklistOpInit(TfLiteContext* context, const char* buffer,
+                      size_t length) {
+  const uint8_t* buffer_t = reinterpret_cast<const uint8_t*>(buffer);
+  return new BlacklistOp(flexbuffers::GetRoot(buffer_t, length).AsMap());
+}
+
+}  // namespace blacklist
+
+TfLiteRegistration* Register_BLACKLIST() {
+  static TfLiteRegistration r = {libtextclassifier3::blacklist::BlacklistOpInit,
+                                 libtextclassifier3::blacklist::Free,
+                                 libtextclassifier3::blacklist::Resize,
+                                 libtextclassifier3::blacklist::Eval};
+  return &r;
+}
+
+}  // namespace libtextclassifier3
+}  // namespace custom
+}  // namespace ops
+}  // namespace tflite
diff --git a/native/utils/tflite/blacklist.h b/native/utils/tflite/blacklist.h
new file mode 100644
index 0000000..0fcf5c4
--- /dev/null
+++ b/native/utils/tflite/blacklist.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_UTILS_TFLITE_BLACKLIST_H_
+#define LIBTEXTCLASSIFIER_UTILS_TFLITE_BLACKLIST_H_
+
+#include "tensorflow/lite/context.h"
+
+namespace tflite {
+namespace ops {
+namespace custom {
+namespace libtextclassifier3 {
+
+TfLiteRegistration* Register_BLACKLIST();
+
+}  // namespace libtextclassifier3
+}  // namespace custom
+}  // namespace ops
+}  // namespace tflite
+
+#endif  // LIBTEXTCLASSIFIER_UTILS_TFLITE_BLACKLIST_H_
diff --git a/native/utils/tflite/blacklist_base.cc b/native/utils/tflite/blacklist_base.cc
new file mode 100644
index 0000000..214283b
--- /dev/null
+++ b/native/utils/tflite/blacklist_base.cc
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "utils/tflite/blacklist_base.h"
+
+#include <cstdint>
+
+#include "absl/container/flat_hash_set.h"
+#include "tensorflow/lite/context.h"
+#include "tensorflow_models/seq_flow_lite/tflite_ops/quantization_util.h"
+namespace tflite {
+namespace ops {
+namespace custom {
+namespace libtextclassifier3 {
+namespace blacklist {
+
+static const int kOutputCategories = 0;
+
+void Free(TfLiteContext* context, void* buffer) {
+  delete reinterpret_cast<BlacklistOpBase*>(buffer);
+}
+
+TfLiteStatus Resize(TfLiteContext* context, TfLiteNode* node) {
+  auto* op = reinterpret_cast<BlacklistOpBase*>(node->user_data);
+
+  TfLiteIntArray* input_dims = op->GetInputShape(context, node);
+  TfLiteIntArray* output_dims = TfLiteIntArrayCreate(input_dims->size + 1);
+  for (int i = 0; i < input_dims->size; i++) {
+    output_dims->data[i] = input_dims->data[i];
+  }
+  output_dims->data[input_dims->size] = op->categories();
+  return context->ResizeTensor(
+      context, &context->tensors[node->outputs->data[kOutputCategories]],
+      output_dims);
+}
+
+TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+  auto* op = reinterpret_cast<BlacklistOpBase*>(node->user_data);
+
+  TfLiteTensor* output_categories =
+      &context->tensors[node->outputs->data[kOutputCategories]];
+
+  TfLiteIntArray* input_dims = op->GetInputShape(context, node);
+  int input_size = 1;
+  for (int i = 0; i < input_dims->size; i++) {
+    input_size *= input_dims->data[i];
+  }
+  const int n_categories = op->categories();
+
+  TF_LITE_ENSURE_STATUS(op->InitializeInput(context, node));
+  if (output_categories->type == kTfLiteFloat32) {
+    for (int i = 0; i < input_size; i++) {
+      absl::flat_hash_set<int> categories = op->GetCategories(i);
+      if (categories.empty()) {
+        for (int j = 0; j < n_categories; j++) {
+          output_categories->data.f[i * n_categories + j] =
+              (j < op->negative_categories()) ? 1.0 : 0.0;
+        }
+      } else {
+        for (int j = 0; j < n_categories; j++) {
+          output_categories->data.f[i * n_categories + j] =
+              (categories.find(j) != categories.end()) ? 1.0 : 0.0;
+        }
+      }
+    }
+  } else if (output_categories->type == kTfLiteUInt8) {
+    const uint8_t one =
+        ::seq_flow_lite::PodQuantize(1.0, output_categories->params.zero_point,
+                                     1.0 / output_categories->params.scale);
+    const uint8_t zero =
+        ::seq_flow_lite::PodQuantize(0.0, output_categories->params.zero_point,
+                                     1.0 / output_categories->params.scale);
+    for (int i = 0; i < input_size; i++) {
+      absl::flat_hash_set<int> categories = op->GetCategories(i);
+      if (categories.empty()) {
+        for (int j = 0; j < n_categories; j++) {
+          output_categories->data.uint8[i * n_categories + j] =
+              (j < op->negative_categories()) ? one : zero;
+        }
+      } else {
+        for (int j = 0; j < n_categories; j++) {
+          output_categories->data.uint8[i * n_categories + j] =
+              (categories.find(j) != categories.end()) ? one : zero;
+        }
+      }
+    }
+  }
+  op->FinalizeInput();
+  return kTfLiteOk;
+}
+
+}  // namespace blacklist
+}  // namespace libtextclassifier3
+}  // namespace custom
+}  // namespace ops
+}  // namespace tflite
diff --git a/native/utils/tflite/blacklist_base.h b/native/utils/tflite/blacklist_base.h
new file mode 100644
index 0000000..3da1ed7
--- /dev/null
+++ b/native/utils/tflite/blacklist_base.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_UTILS_TFLITE_BLACKLIST_BASE_H_
+#define LIBTEXTCLASSIFIER_UTILS_TFLITE_BLACKLIST_BASE_H_
+
+#include "absl/container/flat_hash_set.h"
+#include "flatbuffers/flexbuffers.h"
+#include "tensorflow/lite/context.h"
+
+namespace tflite {
+namespace ops {
+namespace custom {
+namespace libtextclassifier3 {
+namespace blacklist {
+
+/*
+ * A framework for writing ops that generate prediction vectors using a
+ * blacklist.
+ *
+ * Input is defined by the specific implementation.
+ *
+ * Attributes:
+ *   blacklist:           string[n]
+ *     Terms in the blacklist.
+ *   blacklist_category:  int[n]
+ *     Category for each term in the blacklist.  Each category must be in
+ *     [0, categories).
+ *   categories:          int[]
+ *     Total number of categories.
+ *   negative_categories: int[]
+ *     Total number of negative categories.
+ *
+ * Output:
+ *   tensor[0]: Category indicators for each message, float[..., categories]
+ *
+ */
+
+class BlacklistOpBase {
+ public:
+  explicit BlacklistOpBase(const flexbuffers::Map& custom_options)
+      : categories_(custom_options["categories"].AsInt32()),
+        negative_categories_(custom_options["negative_categories"].AsInt32()) {}
+
+  virtual ~BlacklistOpBase() {}
+
+  int categories() const { return categories_; }
+  int negative_categories() const { return negative_categories_; }
+
+  virtual TfLiteStatus InitializeInput(TfLiteContext* context,
+                                       TfLiteNode* node) = 0;
+  virtual absl::flat_hash_set<int> GetCategories(int i) const = 0;
+  virtual void FinalizeInput() = 0;
+
+  // Returns the input shape.  TfLiteIntArray is owned by the object.
+  virtual TfLiteIntArray* GetInputShape(TfLiteContext* context,
+                                        TfLiteNode* node) = 0;
+
+ private:
+  int categories_;
+  int negative_categories_;
+};
+
+// Individual ops should define an Init() function that returns a
+// BlacklistOpBase.
+
+void Free(TfLiteContext* context, void* buffer);
+
+TfLiteStatus Resize(TfLiteContext* context, TfLiteNode* node);
+
+TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node);
+}  // namespace blacklist
+}  // namespace libtextclassifier3
+}  // namespace custom
+}  // namespace ops
+}  // namespace tflite
+
+#endif  // LIBTEXTCLASSIFIER_UTILS_TFLITE_BLACKLIST_BASE_H_
diff --git a/native/utils/tflite/skipgram_finder.cc b/native/utils/tflite/skipgram_finder.cc
new file mode 100644
index 0000000..c69193e
--- /dev/null
+++ b/native/utils/tflite/skipgram_finder.cc
@@ -0,0 +1,203 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "utils/tflite/skipgram_finder.h"
+
+#include <cctype>
+#include <deque>
+#include <string>
+#include <vector>
+
+#include "utils/strings/utf8.h"
+#include "utils/utf8/unilib-common.h"
+#include "absl/container/flat_hash_map.h"
+#include "absl/container/flat_hash_set.h"
+#include "absl/strings/match.h"
+#include "absl/strings/str_split.h"
+#include "absl/strings/string_view.h"
+#include "tensorflow/lite/string_util.h"
+
+namespace libtextclassifier3 {
+namespace {
+
+using ::tflite::StringRef;
+
+void PreprocessToken(std::string& token) {
+  size_t in = 0;
+  size_t out = 0;
+  while (in < token.size()) {
+    const char* in_data = token.data() + in;
+    const int n = GetNumBytesForUTF8Char(in_data);
+    if (n < 0 || n > token.size() - in) {
+      // Invalid Utf8 sequence.
+      break;
+    }
+    in += n;
+    const char32 r = ValidCharToRune(in_data);
+    if (IsPunctuation(r)) {
+      continue;
+    }
+    const char32 rl = ToLower(r);
+    char output_buffer[4];
+    int encoded_length = ValidRuneToChar(rl, output_buffer);
+    if (encoded_length > n) {
+      // This is a hack, but there are exactly two unicode characters whose
+      // lowercase versions have longer UTF-8 encodings (0x23a to 0x2c65,
+      // 0x23e to 0x2c66).  So, to avoid sizing issues, they're not lowercased.
+      encoded_length = ValidRuneToChar(r, output_buffer);
+    }
+    memcpy(token.data() + out, output_buffer, encoded_length);
+    out += encoded_length;
+  }
+
+  size_t remaining = token.size() - in;
+  if (remaining > 0) {
+    memmove(token.data() + out, token.data() + in, remaining);
+    out += remaining;
+  }
+  token.resize(out);
+}
+
+}  // namespace
+
+void SkipgramFinder::AddSkipgram(const std::string& skipgram, int category) {
+  std::vector<std::string> tokens = absl::StrSplit(skipgram, ' ');
+
+  // Store the skipgram in a trie-like structure that uses tokens as the
+  // edge labels, instead of characters.  Each node represents a skipgram made
+  // from the tokens used to reach the node, and stores the categories the
+  // skipgram is associated with.
+  TrieNode* cur = &skipgram_trie_;
+  for (auto& token : tokens) {
+    if (absl::EndsWith(token, ".*")) {
+      token.resize(token.size() - 2);
+      PreprocessToken(token);
+      auto iter = cur->prefix_to_node.find(token);
+      if (iter != cur->prefix_to_node.end()) {
+        cur = &iter->second;
+      } else {
+        cur = &cur->prefix_to_node
+                   .emplace(std::piecewise_construct,
+                            std::forward_as_tuple(token), std::make_tuple<>())
+                   .first->second;
+      }
+      continue;
+    }
+
+    PreprocessToken(token);
+    auto iter = cur->token_to_node.find(token);
+    if (iter != cur->token_to_node.end()) {
+      cur = &iter->second;
+    } else {
+      cur = &cur->token_to_node
+                 .emplace(std::piecewise_construct,
+                          std::forward_as_tuple(token), std::make_tuple<>())
+                 .first->second;
+    }
+  }
+  cur->categories.insert(category);
+}
+
+absl::flat_hash_set<int> SkipgramFinder::FindSkipgrams(
+    const std::string& input) const {
+  std::vector<std::string> tokens = absl::StrSplit(input, ' ');
+  std::vector<absl::string_view> sv_tokens;
+  sv_tokens.reserve(tokens.size());
+  for (auto& token : tokens) {
+    PreprocessToken(token);
+    sv_tokens.emplace_back(token.data(), token.size());
+  }
+  return FindSkipgrams(sv_tokens);
+}
+
+absl::flat_hash_set<int> SkipgramFinder::FindSkipgrams(
+    const std::vector<StringRef>& tokens) const {
+  std::vector<absl::string_view> sv_tokens;
+  sv_tokens.reserve(tokens.size());
+  for (auto& token : tokens) {
+    sv_tokens.emplace_back(token.str, token.len);
+  }
+  return FindSkipgrams(sv_tokens);
+}
+
+absl::flat_hash_set<int> SkipgramFinder::FindSkipgrams(
+    const std::vector<absl::string_view>& tokens) const {
+  absl::flat_hash_set<int> categories;
+
+  // Tracks skipgram prefixes and the index of their last token.
+  std::deque<std::pair<int, const TrieNode*>> indices_and_skipgrams;
+
+  for (int token_i = 0; token_i < tokens.size(); token_i++) {
+    const absl::string_view& token = tokens[token_i];
+
+    std::vector<absl::string_view> token_prefixes;
+    {
+      const char* s = token.data();
+      int n = token.size();
+      while (n > 0) {
+        const int rlen = GetNumBytesForUTF8Char(s);
+        if (rlen < 0 || rlen > n) {
+          // Invalid UTF8.
+          break;
+        }
+        n -= rlen;
+        s += rlen;
+        token_prefixes.emplace_back(token.data(), token.size() - n);
+      }
+    }
+
+    // Drop any skipgram prefixes which would skip more than `max_skip_size_`
+    // tokens between the end of the prefix and the current token.
+    while (!indices_and_skipgrams.empty()) {
+      if (indices_and_skipgrams.front().first + max_skip_size_ + 1 < token_i) {
+        indices_and_skipgrams.pop_front();
+      } else {
+        break;
+      }
+    }
+
+    // Check if we can form a valid skipgram prefix (or skipgram) by adding
+    // the current token to any of the existing skipgram prefixes, or
+    // if the current token is a valid skipgram prefix (or skipgram).
+    size_t size = indices_and_skipgrams.size();
+    for (size_t skipgram_i = 0; skipgram_i <= size; skipgram_i++) {
+      const auto& node = skipgram_i < size
+                             ? *indices_and_skipgrams[skipgram_i].second
+                             : skipgram_trie_;
+
+      auto iter = node.token_to_node.find(token);
+      if (iter != node.token_to_node.end()) {
+        categories.insert(iter->second.categories.begin(),
+                          iter->second.categories.end());
+        indices_and_skipgrams.push_back(std::make_pair(token_i, &iter->second));
+      }
+
+      for (const auto& token_prefix : token_prefixes) {
+        auto iter = node.prefix_to_node.find(token_prefix);
+        if (iter != node.prefix_to_node.end()) {
+          categories.insert(iter->second.categories.begin(),
+                            iter->second.categories.end());
+          indices_and_skipgrams.push_back(
+              std::make_pair(token_i, &iter->second));
+        }
+      }
+    }
+  }
+
+  return categories;
+}
+
+}  // namespace libtextclassifier3
diff --git a/native/utils/tflite/skipgram_finder.h b/native/utils/tflite/skipgram_finder.h
new file mode 100644
index 0000000..e7e8547
--- /dev/null
+++ b/native/utils/tflite/skipgram_finder.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_UTILS_TFLITE_SKIPGRAM_FINDER_H_
+#define LIBTEXTCLASSIFIER_UTILS_TFLITE_SKIPGRAM_FINDER_H_
+
+#include <string>
+#include <vector>
+
+#include "absl/container/flat_hash_map.h"
+#include "absl/container/flat_hash_set.h"
+#include "absl/strings/string_view.h"
+#include "tensorflow/lite/string_util.h"
+
+namespace libtextclassifier3 {
+
+// SkipgramFinder finds skipgrams in strings.
+//
+// To use: First, add skipgrams using AddSkipgram() - each skipgram is
+// associated with some category.  Then, call FindSkipgrams() on a string,
+// which will return the set of categories of the skipgrams in the string.
+//
+// Both the skipgrams and the input strings will be tokenized by splitting
+// on spaces.  Additionally, the tokens will be lowercased and have any
+// trailing punctuation removed.
+class SkipgramFinder {
+ public:
+  explicit SkipgramFinder(int max_skip_size) : max_skip_size_(max_skip_size) {}
+
+  // Adds a skipgram that SkipgramFinder should look for in input strings.
+  // Tokens may use the regex '.*' as a suffix.
+  void AddSkipgram(const std::string& skipgram, int category);
+
+  // Find all of the skipgrams in `input`, and return their categories.
+  absl::flat_hash_set<int> FindSkipgrams(const std::string& input) const;
+
+  // Find all of the skipgrams in `tokens`, and return their categories.
+  absl::flat_hash_set<int> FindSkipgrams(
+      const std::vector<absl::string_view>& tokens) const;
+  absl::flat_hash_set<int> FindSkipgrams(
+      const std::vector<::tflite::StringRef>& tokens) const;
+
+ private:
+  struct TrieNode {
+    absl::flat_hash_set<int> categories;
+    // Maps tokens to the next node in the trie.
+    absl::flat_hash_map<std::string, TrieNode> token_to_node;
+    // Maps token prefixes (<prefix>.*) to the next node in the trie.
+    absl::flat_hash_map<std::string, TrieNode> prefix_to_node;
+  };
+
+  TrieNode skipgram_trie_;
+  int max_skip_size_;
+};
+
+}  // namespace libtextclassifier3
+#endif  // LIBTEXTCLASSIFIER_UTILS_TFLITE_SKIPGRAM_FINDER_H_
diff --git a/native/utils/tflite/string_projection.cc b/native/utils/tflite/string_projection.cc
new file mode 100644
index 0000000..9f8d36e
--- /dev/null
+++ b/native/utils/tflite/string_projection.cc
@@ -0,0 +1,579 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "utils/tflite/string_projection.h"
+
+#include <string>
+#include <unordered_map>
+
+#include "utils/strings/utf8.h"
+#include "utils/tflite/string_projection_base.h"
+#include "utils/utf8/unilib-common.h"
+#include "absl/container/flat_hash_set.h"
+#include "absl/strings/match.h"
+#include "flatbuffers/flexbuffers.h"
+#include "tensorflow/lite/context.h"
+#include "tensorflow/lite/string_util.h"
+
+namespace tflite {
+namespace ops {
+namespace custom {
+
+namespace libtextclassifier3 {
+namespace string_projection {
+namespace {
+
+const char kStartToken[] = "<S>";
+const char kEndToken[] = "<E>";
+const char kEmptyToken[] = "<S> <E>";
+constexpr size_t kEntireString = SIZE_MAX;
+constexpr size_t kAllTokens = SIZE_MAX;
+constexpr int kInvalid = -1;
+
+constexpr char kApostrophe = '\'';
+constexpr char kSpace = ' ';
+constexpr char kComma = ',';
+constexpr char kDot = '.';
+
+// Returns true if the given text contains a number.
+bool IsDigitString(const std::string& text) {
+  for (size_t i = 0; i < text.length();) {
+    const int bytes_read =
+        ::libtextclassifier3::GetNumBytesForUTF8Char(text.data());
+    if (bytes_read <= 0 || bytes_read > text.length() - i) {
+      break;
+    }
+    const char32_t rune = ::libtextclassifier3::ValidCharToRune(text.data());
+    if (::libtextclassifier3::IsDigit(rune)) return true;
+    i += bytes_read;
+  }
+  return false;
+}
+
+// Gets the string containing |num_chars| characters from |start| position.
+std::string GetCharToken(const std::vector<std::string>& char_tokens, int start,
+                         int num_chars) {
+  std::string char_token = "";
+  if (start + num_chars <= char_tokens.size()) {
+    for (int i = 0; i < num_chars; ++i) {
+      char_token.append(char_tokens[start + i]);
+    }
+  }
+  return char_token;
+}
+
+// Counts how many times |pattern| appeared from |start| position.
+int GetNumPattern(const std::vector<std::string>& char_tokens, size_t start,
+                  size_t num_chars, const std::string& pattern) {
+  int count = 0;
+  for (int i = start; i < char_tokens.size(); i += num_chars) {
+    std::string cur_pattern = GetCharToken(char_tokens, i, num_chars);
+    if (pattern == cur_pattern) {
+      ++count;
+    } else {
+      break;
+    }
+  }
+  return count;
+}
+
+inline size_t FindNextSpace(const char* input_ptr, size_t from, size_t length) {
+  size_t space_index;
+  for (space_index = from; space_index < length; space_index++) {
+    if (input_ptr[space_index] == kSpace) {
+      break;
+    }
+  }
+  return space_index == length ? kInvalid : space_index;
+}
+
+template <typename T>
+void SplitByCharInternal(std::vector<T>* tokens, const char* input_ptr,
+                         size_t len, size_t max_tokens) {
+  for (size_t i = 0; i < len;) {
+    auto bytes_read =
+        ::libtextclassifier3::GetNumBytesForUTF8Char(input_ptr + i);
+    if (bytes_read <= 0 || bytes_read > len - i) break;
+    tokens->emplace_back(input_ptr + i, bytes_read);
+    if (max_tokens != kInvalid && tokens->size() == max_tokens) {
+      break;
+    }
+    i += bytes_read;
+  }
+}
+
+std::vector<std::string> SplitByChar(const char* input_ptr, size_t len,
+                                     size_t max_tokens) {
+  std::vector<std::string> tokens;
+  SplitByCharInternal(&tokens, input_ptr, len, max_tokens);
+  return tokens;
+}
+
+std::string ContractToken(const char* input_ptr, size_t len, size_t num_chars) {
+  // This function contracts patterns whose length is |num_chars| and appeared
+  // more than twice. So if the input is shorter than 3 * |num_chars|, do not
+  // apply any contraction.
+  if (len < 3 * num_chars) {
+    return input_ptr;
+  }
+  std::vector<std::string> char_tokens = SplitByChar(input_ptr, len, len);
+
+  std::string token;
+  token.reserve(len);
+  for (int i = 0; i < char_tokens.size();) {
+    std::string cur_pattern = GetCharToken(char_tokens, i, num_chars);
+
+    // Count how many times this pattern appeared.
+    int num_cur_patterns = 0;
+    if (!absl::StrContains(cur_pattern, " ") && !IsDigitString(cur_pattern)) {
+      num_cur_patterns =
+          GetNumPattern(char_tokens, i + num_chars, num_chars, cur_pattern);
+    }
+
+    if (num_cur_patterns >= 2) {
+      // If this pattern is repeated, store it only twice.
+      token.append(cur_pattern);
+      token.append(cur_pattern);
+      i += (num_cur_patterns + 1) * num_chars;
+    } else {
+      token.append(char_tokens[i]);
+      ++i;
+    }
+  }
+
+  return token;
+}
+
+template <typename T>
+void SplitBySpaceInternal(std::vector<T>* tokens, const char* input_ptr,
+                          size_t len, size_t max_input, size_t max_tokens) {
+  size_t last_index =
+      max_input == kEntireString ? len : (len < max_input ? len : max_input);
+  size_t start = 0;
+  // skip leading spaces
+  while (start < last_index && input_ptr[start] == kSpace) {
+    start++;
+  }
+  auto end = FindNextSpace(input_ptr, start, last_index);
+  while (end != kInvalid &&
+         (max_tokens == kAllTokens || tokens->size() < max_tokens - 1)) {
+    auto length = end - start;
+    if (length > 0) {
+      tokens->emplace_back(input_ptr + start, length);
+    }
+
+    start = end + 1;
+    end = FindNextSpace(input_ptr, start, last_index);
+  }
+  auto length = end == kInvalid ? (last_index - start) : (end - start);
+  if (length > 0) {
+    tokens->emplace_back(input_ptr + start, length);
+  }
+}
+
+std::vector<std::string> SplitBySpace(const char* input_ptr, size_t len,
+                                      size_t max_input, size_t max_tokens) {
+  std::vector<std::string> tokens;
+  SplitBySpaceInternal(&tokens, input_ptr, len, max_input, max_tokens);
+  return tokens;
+}
+
+bool prepend_separator(char separator) { return separator == kApostrophe; }
+
+bool is_numeric(char c) { return c >= '0' && c <= '9'; }
+
+class ProjectionNormalizer {
+ public:
+  explicit ProjectionNormalizer(const std::string& separators,
+                                bool normalize_repetition = false) {
+    InitializeSeparators(separators);
+    normalize_repetition_ = normalize_repetition;
+  }
+
+  // Normalizes the repeated characters (except numbers) which consecutively
+  // appeared more than twice in a word.
+  std::string Normalize(const std::string& input, size_t max_input = 300) {
+    return Normalize(input.data(), input.length(), max_input);
+  }
+  std::string Normalize(const char* input_ptr, size_t len,
+                        size_t max_input = 300) {
+    std::string normalized(input_ptr, std::min(len, max_input));
+
+    if (normalize_repetition_) {
+      // Remove repeated 1 char (e.g. soooo => soo)
+      normalized = ContractToken(normalized.data(), normalized.length(), 1);
+
+      // Remove repeated 2 chars from the beginning (e.g. hahaha =>
+      // haha, xhahaha => xhaha, xyhahaha => xyhaha).
+      normalized = ContractToken(normalized.data(), normalized.length(), 2);
+
+      // Remove repeated 3 chars from the beginning
+      // (e.g. wowwowwow => wowwow, abcdbcdbcd => abcdbcd).
+      normalized = ContractToken(normalized.data(), normalized.length(), 3);
+    }
+
+    if (!separators_.empty()) {
+      // Add space around separators_.
+      normalized = NormalizeInternal(normalized.data(), normalized.length());
+    }
+    return normalized;
+  }
+
+ private:
+  // Parses and extracts supported separators.
+  void InitializeSeparators(const std::string& separators) {
+    for (int i = 0; i < separators.length(); ++i) {
+      if (separators[i] != ' ') {
+        separators_.insert(separators[i]);
+      }
+    }
+  }
+
+  // Removes repeated chars.
+  std::string NormalizeInternal(const char* input_ptr, size_t len) {
+    std::string normalized;
+    normalized.reserve(len * 2);
+    for (int i = 0; i < len; ++i) {
+      char c = input_ptr[i];
+      bool matched_separator = separators_.find(c) != separators_.end();
+      if (matched_separator) {
+        if (i > 0 && input_ptr[i - 1] != ' ' && normalized.back() != ' ') {
+          normalized.append(" ");
+        }
+      }
+      normalized.append(1, c);
+      if (matched_separator) {
+        if (i + 1 < len && input_ptr[i + 1] != ' ' && c != '\'') {
+          normalized.append(" ");
+        }
+      }
+    }
+    return normalized;
+  }
+
+  absl::flat_hash_set<char> separators_;
+  bool normalize_repetition_;
+};
+
+class ProjectionTokenizer {
+ public:
+  explicit ProjectionTokenizer(const std::string& separators) {
+    InitializeSeparators(separators);
+  }
+
+  // Tokenizes the input by separators_. Limit to max_tokens, when it is not -1.
+  std::vector<std::string> Tokenize(const std::string& input, size_t max_input,
+                                    size_t max_tokens) const {
+    return Tokenize(input.c_str(), input.size(), max_input, max_tokens);
+  }
+
+  std::vector<std::string> Tokenize(const char* input_ptr, size_t len,
+                                    size_t max_input, size_t max_tokens) const {
+    // If separators_ is not given, tokenize the input with a space.
+    if (separators_.empty()) {
+      return SplitBySpace(input_ptr, len, max_input, max_tokens);
+    }
+
+    std::vector<std::string> tokens;
+    size_t last_index =
+        max_input == kEntireString ? len : (len < max_input ? len : max_input);
+    size_t start = 0;
+    // Skip leading spaces.
+    while (start < last_index && input_ptr[start] == kSpace) {
+      start++;
+    }
+    auto end = FindNextSeparator(input_ptr, start, last_index);
+
+    while (end != kInvalid &&
+           (max_tokens == kAllTokens || tokens.size() < max_tokens - 1)) {
+      auto length = end - start;
+      if (length > 0) tokens.emplace_back(input_ptr + start, length);
+
+      // Add the separator (except space and apostrophe) as a token
+      char separator = input_ptr[end];
+      if (separator != kSpace && separator != kApostrophe) {
+        tokens.emplace_back(input_ptr + end, 1);
+      }
+
+      start = end + (prepend_separator(separator) ? 0 : 1);
+      end = FindNextSeparator(input_ptr, end + 1, last_index);
+    }
+    auto length = end == kInvalid ? (last_index - start) : (end - start);
+    if (length > 0) tokens.emplace_back(input_ptr + start, length);
+    return tokens;
+  }
+
+ private:
+  // Parses and extracts supported separators.
+  void InitializeSeparators(const std::string& separators) {
+    for (int i = 0; i < separators.length(); ++i) {
+      separators_.insert(separators[i]);
+    }
+  }
+
+  // Starting from input_ptr[from], search for the next occurrence of
+  // separators_. Don't search beyond input_ptr[length](non-inclusive). Return
+  // -1 if not found.
+  size_t FindNextSeparator(const char* input_ptr, size_t from,
+                           size_t length) const {
+    auto index = from;
+    while (index < length) {
+      char c = input_ptr[index];
+      // Do not break a number (e.g. "10,000", "0.23").
+      if (c == kComma || c == kDot) {
+        if (index + 1 < length && is_numeric(input_ptr[index + 1])) {
+          c = input_ptr[++index];
+        }
+      }
+      if (separators_.find(c) != separators_.end()) {
+        break;
+      }
+      ++index;
+    }
+    return index == length ? kInvalid : index;
+  }
+
+  absl::flat_hash_set<char> separators_;
+};
+
+inline void StripTrailingAsciiPunctuation(std::string* str) {
+  auto it = std::find_if_not(str->rbegin(), str->rend(), ::ispunct);
+  str->erase(str->rend() - it);
+}
+
+std::string PreProcessString(const char* str, int len,
+                             const bool remove_punctuation) {
+  std::string output_str(str, len);
+  std::transform(output_str.begin(), output_str.end(), output_str.begin(),
+                 ::tolower);
+
+  // Remove trailing punctuation.
+  if (remove_punctuation) {
+    StripTrailingAsciiPunctuation(&output_str);
+  }
+
+  if (output_str.empty()) {
+    output_str.assign(str, len);
+  }
+  return output_str;
+}
+
+bool ShouldIncludeCurrentNgram(const SkipGramParams& params, int size) {
+  if (size <= 0) {
+    return false;
+  }
+  if (params.include_all_ngrams) {
+    return size <= params.ngram_size;
+  } else {
+    return size == params.ngram_size;
+  }
+}
+
+bool ShouldStepInRecursion(const std::vector<int>& stack, int stack_idx,
+                           int num_words, const SkipGramParams& params) {
+  // If current stack size and next word enumeration are within valid range.
+  if (stack_idx < params.ngram_size && stack[stack_idx] + 1 < num_words) {
+    // If this stack is empty, step in for first word enumeration.
+    if (stack_idx == 0) {
+      return true;
+    }
+    // If next word enumeration are within the range of max_skip_size.
+    // NOTE: equivalent to
+    //   next_word_idx = stack[stack_idx] + 1
+    //   next_word_idx - stack[stack_idx-1] <= max_skip_size + 1
+    if (stack[stack_idx] - stack[stack_idx - 1] <= params.max_skip_size) {
+      return true;
+    }
+  }
+  return false;
+}
+
+std::string JoinTokensBySpace(const std::vector<int>& stack, int stack_idx,
+                              const std::vector<std::string>& tokens) {
+  int len = 0;
+  for (int i = 0; i < stack_idx; i++) {
+    len += tokens[stack[i]].size();
+  }
+  len += stack_idx - 1;
+
+  std::string res;
+  res.reserve(len);
+  res.append(tokens[stack[0]]);
+  for (int i = 1; i < stack_idx; i++) {
+    res.append(" ");
+    res.append(tokens[stack[i]]);
+  }
+
+  return res;
+}
+
+std::unordered_map<std::string, int> ExtractSkipGramsImpl(
+    const std::vector<std::string>& tokens, const SkipGramParams& params) {
+  // Ignore positional tokens.
+  static auto* blacklist = new std::unordered_set<std::string>({
+      kStartToken,
+      kEndToken,
+      kEmptyToken,
+  });
+
+  std::unordered_map<std::string, int> res;
+
+  // Stack stores the index of word used to generate ngram.
+  // The size of stack is the size of ngram.
+  std::vector<int> stack(params.ngram_size + 1, 0);
+  // Stack index that indicates which depth the recursion is operating at.
+  int stack_idx = 1;
+  int num_words = tokens.size();
+
+  while (stack_idx >= 0) {
+    if (ShouldStepInRecursion(stack, stack_idx, num_words, params)) {
+      // When current depth can fill with a new word
+      // and the new word is within the max range to skip,
+      // fill this word to stack, recurse into next depth.
+      stack[stack_idx]++;
+      stack_idx++;
+      stack[stack_idx] = stack[stack_idx - 1];
+    } else {
+      if (ShouldIncludeCurrentNgram(params, stack_idx)) {
+        // Add n-gram to tensor buffer when the stack has filled with enough
+        // words to generate the ngram.
+        std::string ngram = JoinTokensBySpace(stack, stack_idx, tokens);
+        if (blacklist->find(ngram) == blacklist->end()) {
+          res[ngram] = stack_idx;
+        }
+      }
+      // When current depth cannot fill with a valid new word,
+      // and not in last depth to generate ngram,
+      // step back to previous depth to iterate to next possible word.
+      stack_idx--;
+    }
+  }
+
+  return res;
+}
+
+std::unordered_map<std::string, int> ExtractSkipGrams(
+    const std::string& input, ProjectionTokenizer* tokenizer,
+    ProjectionNormalizer* normalizer, const SkipGramParams& params) {
+  // Normalize the input.
+  const std::string& normalized =
+      normalizer == nullptr
+          ? input
+          : normalizer->Normalize(input, params.max_input_chars);
+
+  // Split sentence to words.
+  std::vector<std::string> tokens;
+  if (params.char_level) {
+    tokens = SplitByChar(normalized.data(), normalized.size(),
+                         params.max_input_chars);
+  } else {
+    tokens = tokenizer->Tokenize(normalized.data(), normalized.size(),
+                                 params.max_input_chars, kAllTokens);
+  }
+
+  // Process tokens
+  for (int i = 0; i < tokens.size(); ++i) {
+    if (params.preprocess) {
+      tokens[i] = PreProcessString(tokens[i].data(), tokens[i].size(),
+                                   params.remove_punctuation);
+    }
+  }
+
+  tokens.insert(tokens.begin(), kStartToken);
+  tokens.insert(tokens.end(), kEndToken);
+
+  return ExtractSkipGramsImpl(tokens, params);
+}
+}  // namespace
+// Generates LSH projections for input strings.  This uses the framework in
+// `string_projection_base.h`, with the implementation details that the input is
+// a string tensor of messages and the op will perform tokenization.
+//
+// Input:
+//   tensor[0]: Input message, string[...]
+//
+// Additional attributes:
+//   max_input_chars: int[]
+//     maximum number of input characters to use from each message.
+//   token_separators: string[]
+//     the list of separators used to tokenize the input.
+//   normalize_repetition: bool[]
+//     if true, remove repeated characters in tokens ('loool' -> 'lol').
+
+static const int kInputMessage = 0;
+
+class StringProjectionOp : public StringProjectionOpBase {
+ public:
+  explicit StringProjectionOp(const flexbuffers::Map& custom_options)
+      : StringProjectionOpBase(custom_options),
+        projection_normalizer_(
+            custom_options["token_separators"].AsString().str(),
+            custom_options["normalize_repetition"].AsBool()),
+        projection_tokenizer_(" ") {
+    if (custom_options["max_input_chars"].IsInt()) {
+      skip_gram_params().max_input_chars =
+          custom_options["max_input_chars"].AsInt32();
+    }
+  }
+
+  TfLiteStatus InitializeInput(TfLiteContext* context,
+                               TfLiteNode* node) override {
+    input_ = &context->tensors[node->inputs->data[kInputMessage]];
+    return kTfLiteOk;
+  }
+
+  std::unordered_map<std::string, int> ExtractSkipGrams(int i) override {
+    StringRef input = GetString(input_, i);
+    return ::tflite::ops::custom::libtextclassifier3::string_projection::
+        ExtractSkipGrams({input.str, static_cast<size_t>(input.len)},
+                         &projection_tokenizer_, &projection_normalizer_,
+                         skip_gram_params());
+  }
+
+  void FinalizeInput() override { input_ = nullptr; }
+
+  TfLiteIntArray* GetInputShape(TfLiteContext* context,
+                                TfLiteNode* node) override {
+    return context->tensors[node->inputs->data[kInputMessage]].dims;
+  }
+
+ private:
+  ProjectionNormalizer projection_normalizer_;
+  ProjectionTokenizer projection_tokenizer_;
+
+  TfLiteTensor* input_;
+};
+
+void* Init(TfLiteContext* context, const char* buffer, size_t length) {
+  const uint8_t* buffer_t = reinterpret_cast<const uint8_t*>(buffer);
+  return new StringProjectionOp(flexbuffers::GetRoot(buffer_t, length).AsMap());
+}
+
+}  // namespace string_projection
+
+// This op converts a list of strings to integers via LSH projections.
+TfLiteRegistration* Register_STRING_PROJECTION() {
+  static TfLiteRegistration r = {libtextclassifier3::string_projection::Init,
+                                 libtextclassifier3::string_projection::Free,
+                                 libtextclassifier3::string_projection::Resize,
+                                 libtextclassifier3::string_projection::Eval};
+  return &r;
+}
+
+}  // namespace libtextclassifier3
+}  // namespace custom
+}  // namespace ops
+}  // namespace tflite
diff --git a/native/utils/tflite/string_projection.h b/native/utils/tflite/string_projection.h
new file mode 100644
index 0000000..ba86a21
--- /dev/null
+++ b/native/utils/tflite/string_projection.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_UTILS_TFLITE_STRING_PROJECTION_H_
+#define LIBTEXTCLASSIFIER_UTILS_TFLITE_STRING_PROJECTION_H_
+
+#include "tensorflow/lite/context.h"
+
+namespace tflite {
+namespace ops {
+namespace custom {
+namespace libtextclassifier3 {
+
+TfLiteRegistration* Register_STRING_PROJECTION();
+
+}  // namespace libtextclassifier3
+}  // namespace custom
+}  // namespace ops
+}  // namespace tflite
+
+#endif  // LIBTEXTCLASSIFIER_UTILS_TFLITE_STRING_PROJECTION_H_
diff --git a/native/utils/tflite/string_projection_base.cc b/native/utils/tflite/string_projection_base.cc
new file mode 100644
index 0000000..d185f52
--- /dev/null
+++ b/native/utils/tflite/string_projection_base.cc
@@ -0,0 +1,255 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "utils/tflite/string_projection_base.h"
+
+#include <cmath>
+#include <cstdint>
+#include <cstring>
+#include <memory>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "utils/hash/cityhash.h"
+#include "utils/hash/farmhash.h"
+#include "flatbuffers/flexbuffers.h"
+#include "tensorflow/lite/context.h"
+#include "tensorflow_models/seq_flow_lite/tflite_ops/quantization_util.h"
+
+namespace tflite {
+namespace ops {
+namespace custom {
+namespace libtextclassifier3 {
+namespace string_projection {
+
+namespace {
+const int32_t kMaxInputChars = 300;
+
+const int kOutputLabel = 0;
+const char kFastHash[] = "[DEV] FastHash";
+const char kAXB[] = "[DEV] AXB";
+
+const int kSeedSize = sizeof(float);
+const int kInputItemBytes = sizeof(int32_t);
+const int kKeyBytes = sizeof(float) + sizeof(int32_t);
+
+}  // namespace
+
+StringProjectionOpBase::StringProjectionOpBase(
+    const flexbuffers::Map& custom_options)
+    : hash_function_(custom_options["hash_function"].AsTypedVector()),
+      num_hash_(custom_options["num_hash"].AsInt32()),
+      num_bits_(custom_options["num_bits"].AsInt32()),
+      binary_projection_(custom_options["binary_projection"].AsBool()),
+      hash_method_(custom_options["hash_method"].ToString()),
+      axb_scale_(custom_options["axb_scale"].AsFloat()) {
+  skip_gram_params_ = {
+      .ngram_size = custom_options["ngram_size"].AsInt32(),
+      .max_skip_size = custom_options["max_skip_size"].AsInt32(),
+      .include_all_ngrams = custom_options["include_all_ngrams"].AsBool(),
+      .preprocess = custom_options["preprocess"].AsBool(),
+      .char_level = custom_options["char_level"].AsBool(),
+      .remove_punctuation = custom_options["remove_punctuation"].AsBool(),
+      .max_input_chars = kMaxInputChars,
+  };
+}
+
+void StringProjectionOpBase::GetFeatureWeights(
+    const std::unordered_map<std::string, int>& feature_counts,
+    std::vector<std::vector<int64_t>>* batch_ids,
+    std::vector<std::vector<float>>* batch_weights) {
+  std::vector<int64_t> ids;
+  std::vector<float> weights;
+  for (const auto& iter : feature_counts) {
+    if (hash_method_ == kFastHash || hash_method_ == kAXB) {
+      int32_t feature_id =
+          tc3farmhash::CityHash64(iter.first.c_str(), iter.first.size());
+      ids.push_back(feature_id);
+      weights.push_back(iter.second);
+    } else {
+      int64_t feature_id =
+          tc3farmhash::Fingerprint64(iter.first.c_str(), iter.first.size());
+      ids.push_back(feature_id);
+      weights.push_back(iter.second);
+    }
+  }
+
+  batch_ids->push_back(ids);
+  batch_weights->push_back(weights);
+}
+
+void StringProjectionOpBase::DenseLshProjection(
+    const int batch_size, const std::vector<std::vector<int64_t>>& batch_ids,
+    const std::vector<std::vector<float>>& batch_weights,
+    TfLiteTensor* output) {
+  auto key = std::unique_ptr<char[]>(
+      new char[kKeyBytes]);  // NOLINT: modernize-make-unique
+
+  if (output->type == kTfLiteFloat32) {
+    for (int batch = 0; batch < batch_size; ++batch) {
+      const std::vector<int64_t>& input = batch_ids[batch];
+      const std::vector<float>& weight = batch_weights[batch];
+
+      for (int i = 0; i < num_hash_; i++) {
+        for (int j = 0; j < num_bits_; j++) {
+          int hash_bit = i * num_bits_ + j;
+          float seed = hash_function_[hash_bit].AsFloat();
+          float bit = running_sign_bit(input, weight, seed, key.get());
+          output->data.f[batch * num_hash_ * num_bits_ + hash_bit] = bit;
+        }
+      }
+    }
+  } else if (output->type == kTfLiteUInt8) {
+    const float inverse_scale = 1.0 / output->params.scale;
+    for (int batch = 0; batch < batch_size; ++batch) {
+      const std::vector<int64_t>& input = batch_ids[batch];
+      const std::vector<float>& weight = batch_weights[batch];
+
+      for (int i = 0; i < num_hash_; i++) {
+        for (int j = 0; j < num_bits_; j++) {
+          int hash_bit = i * num_bits_ + j;
+          float seed = hash_function_[hash_bit].AsFloat();
+          float bit = running_sign_bit(input, weight, seed, key.get());
+          output->data.uint8[batch * num_hash_ * num_bits_ + hash_bit] =
+              seq_flow_lite::PodQuantize(bit, output->params.zero_point,
+                                         inverse_scale);
+        }
+      }
+    }
+  }
+}
+
+namespace {
+
+int32_t hash32(int32_t value, uint32_t seed) {
+  uint32_t hash = value;
+  hash = (hash ^ 61) ^ (hash >> 16);
+  hash = hash + (hash << 3);
+  hash = hash ^ (hash >> 4);
+  hash = hash * seed;
+  hash = hash ^ (hash >> 15);
+  return static_cast<int32_t>(hash);
+}
+
+double axb(int32_t value, float seed, float scale) {
+  // Scale the seed up to a larger range; the 1e5 multiplier avoids
+  // precision differences across hardware.
+  int64_t hash_signature =
+      static_cast<int64_t>(scale) * static_cast<int64_t>(seed * 1e5) * value;
+  hash_signature %= 0x100000000;
+  hash_signature = fabs(hash_signature);
+  if (hash_signature >= 0x80000000) {
+    hash_signature -= 0x100000000;
+  }
+  return hash_signature;
+}
+
+}  // namespace
+
+// Compute sign bit of dot product of hash(seed, input) and weight.
+float StringProjectionOpBase::running_sign_bit(
+    const std::vector<int64_t>& input, const std::vector<float>& weight,
+    float seed, char* key) {
+  double score = 0.0;
+  memcpy(key, &seed, kSeedSize);
+  int cnt = 0;
+  for (int i = 0; i < input.size(); ++i) {
+    if (weight[i] == 0.0) continue;
+    cnt++;
+    const int32_t curr_input = input[i];
+    memcpy(key + kSeedSize, &curr_input, kInputItemBytes);
+
+    // Create running hash id and value for current dimension.
+    if (hash_method_ == kFastHash) {
+      int32_t hash_signature =
+          hash32(input[i], *reinterpret_cast<uint32_t*>(&seed));
+      score += static_cast<double>(weight[i]) * hash_signature;
+    } else if (hash_method_ == kAXB) {
+      score += weight[i] * axb(input[i], seed, axb_scale_);
+    } else {
+      int64_t hash_signature = tc3farmhash::Fingerprint64(key, kKeyBytes);
+      double running_value = static_cast<double>(hash_signature);
+      score += weight[i] * running_value;
+    }
+  }
+
+  const double inverse_normalizer = 0.00000000046566129;
+  if (!binary_projection_) {
+    if (hash_method_ == kAXB) {
+      return tanh(score / cnt * inverse_normalizer);
+    } else {
+      return tanh(score * inverse_normalizer);
+    }
+  }
+
+  return (score > 0) ? 1 : 0;
+}
+
+void Free(TfLiteContext* context, void* buffer) {
+  delete reinterpret_cast<StringProjectionOpBase*>(buffer);
+}
+
+TfLiteStatus Resize(TfLiteContext* context, TfLiteNode* node) {
+  auto* op = reinterpret_cast<StringProjectionOpBase*>(node->user_data);
+
+  // The shape of the output should be the shape of the input + a new inner
+  // dimension equal to the number of features.
+  TfLiteIntArray* input_shape = op->GetInputShape(context, node);
+  TfLiteIntArray* output_shape = TfLiteIntArrayCreate(input_shape->size + 1);
+  for (int i = 0; i < input_shape->size; ++i) {
+    output_shape->data[i] = input_shape->data[i];
+  }
+  output_shape->data[input_shape->size] = op->num_hash() * op->num_bits();
+  context->ResizeTensor(context,
+                        &context->tensors[node->outputs->data[kOutputLabel]],
+                        output_shape);
+  return kTfLiteOk;
+}
+
+TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+  auto* op = reinterpret_cast<StringProjectionOpBase*>(node->user_data);
+
+  TfLiteTensor* label = &context->tensors[node->outputs->data[kOutputLabel]];
+
+  TfLiteIntArray* input_shape = op->GetInputShape(context, node);
+  int input_size = 1;
+  for (int i = 0; i < input_shape->size; ++i) {
+    input_size *= input_shape->data[i];
+  }
+
+  TF_LITE_ENSURE_STATUS(op->InitializeInput(context, node));
+
+  std::vector<std::vector<int64_t>> batch_ids;
+  std::vector<std::vector<float>> batch_weights;
+  for (int i = 0; i < input_size; ++i) {
+    std::unordered_map<std::string, int> feature_counts =
+        op->ExtractSkipGrams(i);
+    op->GetFeatureWeights(feature_counts, &batch_ids, &batch_weights);
+  }
+
+  op->DenseLshProjection(input_size, batch_ids, batch_weights, label);
+
+  op->FinalizeInput();
+
+  return kTfLiteOk;
+}
+
+}  // namespace string_projection
+}  // namespace libtextclassifier3
+}  // namespace custom
+}  // namespace ops
+}  // namespace tflite
diff --git a/native/utils/tflite/string_projection_base.h b/native/utils/tflite/string_projection_base.h
new file mode 100644
index 0000000..61b1708
--- /dev/null
+++ b/native/utils/tflite/string_projection_base.h
@@ -0,0 +1,156 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_UTILS_TFLITE_STRING_PROJECTION_BASE_H_
+#define LIBTEXTCLASSIFIER_UTILS_TFLITE_STRING_PROJECTION_BASE_H_
+
+/**
+ * String projection op used in Self-Governing Neural Network (SGNN)
+ * and other ProjectionNet models for text prediction.
+ * The code is copied/adapted from
+ * learning/expander/pod/deep_pod/tflite_handlers/
+ */
+
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "flatbuffers/flexbuffers.h"
+#include "tensorflow/lite/context.h"
+
+namespace tflite {
+namespace ops {
+namespace custom {
+namespace libtextclassifier3 {
+namespace string_projection {
+
+struct SkipGramParams {
+  // Num of tokens in ngram.
+  int ngram_size;
+
+  // Max num of tokens to skip in skip gram.
+  int max_skip_size;
+
+  // True to include all k-grams where k <= ngram_size.
+  bool include_all_ngrams;
+
+  // True when preprocessing (normalization) is enabled.
+  bool preprocess;
+
+  // True when tokens are chars, false when tokens are whitespace separated.
+  bool char_level;
+
+  // True when punctuations are removed.
+  bool remove_punctuation;
+
+  // Max num of chars to process in input.
+  int max_input_chars;
+};
+
+/**
+ * A framework for writing TFLite ops that convert strings to integers via LSH
+ * projections.  Input is defined by the specific implementation.
+ * NOTE: Only supports dense projection.
+ *
+ * Attributes:
+ *   num_hash:           int[]
+ *     number of hash functions
+ *   num_bits:           int[]
+ *     number of bits in each hash function
+ *   hash_function:      float[num_hash * num_bits]
+ *     hash_functions used to generate projections
+ *   ngram_size:         int[]
+ *     maximum number of tokens in skipgrams
+ *   max_skip_size:      int[]
+ *     maximum number of tokens to skip between tokens in skipgrams.
+ *   include_all_ngrams: bool[]
+ *     if false, only use skipgrams with ngram_size tokens
+ *   preprocess:         bool[]
+ *     if true, normalize input strings (lower case, remove punctuation)
+ *   hash_method:        string[]
+ *     hashing function to use
+ *   char_level:         bool[]
+ *     if true, treat each character as a token
+ *   binary_projection:  bool[]
+ *     if true, output features are 0 or 1
+ *   remove_punctuation: bool[]
+ *     if true, remove punctuation during normalization/preprocessing
+ *
+ * Output:
+ *   tensor[0]: computed projections. float32[..., num_func * num_bits]
+ */
+
+class StringProjectionOpBase {
+ public:
+  explicit StringProjectionOpBase(const flexbuffers::Map& custom_options);
+
+  virtual ~StringProjectionOpBase() {}
+
+  void GetFeatureWeights(
+      const std::unordered_map<std::string, int>& feature_counts,
+      std::vector<std::vector<int64_t>>* batch_ids,
+      std::vector<std::vector<float>>* batch_weights);
+
+  void DenseLshProjection(const int batch_size,
+                          const std::vector<std::vector<int64_t>>& batch_ids,
+                          const std::vector<std::vector<float>>& batch_weights,
+                          TfLiteTensor* output);
+
+  inline int num_hash() { return num_hash_; }
+  inline int num_bits() { return num_bits_; }
+  virtual TfLiteStatus InitializeInput(TfLiteContext* context,
+                                       TfLiteNode* node) = 0;
+  virtual std::unordered_map<std::string, int> ExtractSkipGrams(int i) = 0;
+  virtual void FinalizeInput() = 0;
+
+  // Returns the input shape.  TfLiteIntArray is owned by the object.
+  virtual TfLiteIntArray* GetInputShape(TfLiteContext* context,
+                                        TfLiteNode* node) = 0;
+
+ protected:
+  SkipGramParams& skip_gram_params() { return skip_gram_params_; }
+
+ private:
+  ::flexbuffers::TypedVector hash_function_;
+  int num_hash_;
+  int num_bits_;
+  bool binary_projection_;
+  std::string hash_method_;
+  float axb_scale_;
+  SkipGramParams skip_gram_params_;
+
+  // Compute sign bit of dot product of hash(seed, input) and weight.
+  float running_sign_bit(const std::vector<int64_t>& input,
+                         const std::vector<float>& weight, float seed,
+                         char* key);
+};
+
+// Individual ops should define an Init() function that returns a
+// StringProjectionOpBase.
+
+void Free(TfLiteContext* context, void* buffer);
+
+TfLiteStatus Resize(TfLiteContext* context, TfLiteNode* node);
+
+TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node);
+
+}  // namespace string_projection
+}  // namespace libtextclassifier3
+}  // namespace custom
+}  // namespace ops
+}  // namespace tflite
+
+#endif  // LIBTEXTCLASSIFIER_UTILS_TFLITE_STRING_PROJECTION_BASE_H_
diff --git a/native/utils/tokenizer-utils.cc b/native/utils/tokenizer-utils.cc
index 7d07b0c..c812acf 100644
--- a/native/utils/tokenizer-utils.cc
+++ b/native/utils/tokenizer-utils.cc
@@ -21,6 +21,8 @@
 #include "utils/codepoint-range.h"
 #include "utils/strings/utf8.h"
 #include "utils/utf8/unicodetext.h"
+#include "utils/utf8/unilib-common.h"
+#include "absl/container/flat_hash_set.h"
 
 namespace libtextclassifier3 {
 
@@ -31,43 +33,24 @@
 }
 
 std::vector<Token> TokenizeOnDelimiters(
-    const std::string& text, const std::unordered_set<char32>& delimiters,
+    const std::string& text, const absl::flat_hash_set<char32>& delimiters,
     bool create_tokens_for_non_space_delimiters) {
-  const UnicodeText unicode_text = UTF8ToUnicodeText(text, /*do_copy=*/false);
-
-  std::vector<Token> result;
-
-  int token_start_codepoint = 0;
-  auto token_start_it = unicode_text.begin();
-  int codepoint_idx = 0;
-
-  UnicodeText::const_iterator it;
-  for (it = unicode_text.begin(); it < unicode_text.end(); it++) {
-    if (delimiters.find(*it) != delimiters.end()) {
-      // Only add a token when the string is non-empty.
-      if (token_start_it != it) {
-        result.push_back(Token{UnicodeText::UTF8Substring(token_start_it, it),
-                               token_start_codepoint, codepoint_idx});
-      }
-      if (create_tokens_for_non_space_delimiters && *it != ' ') {
-        result.push_back(
-            Token{std::string(1, *it), codepoint_idx, codepoint_idx + 1});
-      }
-
-      token_start_codepoint = codepoint_idx + 1;
-      token_start_it = it;
-      token_start_it++;
-    }
-
-    codepoint_idx++;
-  }
-  // Only add a token when the string is non-empty.
-  if (token_start_it != it) {
-    result.push_back(Token{UnicodeText::UTF8Substring(token_start_it, it),
-                           token_start_codepoint, codepoint_idx});
-  }
-
-  return result;
+  return TokenizeWithFilter(text, [&](char32 codepoint) {
+    bool to_split = delimiters.find(codepoint) != delimiters.end();
+    bool to_keep =
+        (create_tokens_for_non_space_delimiters) ? codepoint != ' ' : false;
+    return FilterResult{to_split, to_keep};
+  });
 }
 
+std::vector<Token> TokenizeOnWhiteSpacePunctuationAndChineseLetter(
+    const absl::string_view text) {
+  return TokenizeWithFilter(text, [](char32 codepoint) {
+    bool is_whitespace = IsWhitespace(codepoint);
+    bool to_split =
+        is_whitespace || IsPunctuation(codepoint) || IsChineseLetter(codepoint);
+    bool to_keep = !is_whitespace;
+    return FilterResult{to_split, to_keep};
+  });
+}
 }  // namespace  libtextclassifier3
diff --git a/native/utils/tokenizer-utils.h b/native/utils/tokenizer-utils.h
index 553791b..7d850d9 100644
--- a/native/utils/tokenizer-utils.h
+++ b/native/utils/tokenizer-utils.h
@@ -22,9 +22,22 @@
 #include <string>
 
 #include "annotator/types.h"
+#include "utils/codepoint-range.h"
+#include "utils/strings/utf8.h"
+#include "utils/utf8/unicodetext.h"
+#include "absl/container/flat_hash_set.h"
+#include "absl/strings/string_view.h"
 
 namespace libtextclassifier3 {
 
+struct FilterResult {
+  // Whether split on this codepoint.
+  bool to_split;
+  // If the codepoint is used to split the text, whether to output it as a
+  // token.
+  bool to_keep;
+};
+
 // Returns a list of Tokens for a given input string, by tokenizing on space.
 std::vector<Token> TokenizeOnSpace(const std::string& text);
 
@@ -33,11 +46,60 @@
 // If create_tokens_for_non_space_delimiters is true, create tokens for
 // delimiters which are not white spaces. For example "This, is" -> {"This",
 // ",", "is"}.
-
 std::vector<Token> TokenizeOnDelimiters(
-    const std::string& text, const std::unordered_set<char32>& delimiters,
+    const std::string& text, const absl::flat_hash_set<char32>& delimiters,
     bool create_tokens_for_non_space_delimiters = false);
 
+// This replicates how the original bert_tokenizer from the tflite-support
+// library pretokenize text by using regex_split with these default regexes.
+// It splits the text on spaces, punctuations and chinese characters and
+// output all the tokens except spaces.
+// So far, the only difference between this and the original implementation
+// we are aware of is that the original regexes has 8 ranges of chinese
+// unicodes. We have all these 8 ranges plus two extra ranges.
+std::vector<Token> TokenizeOnWhiteSpacePunctuationAndChineseLetter(
+    const absl::string_view text);
+
+// Returns a list of Tokens for a given input string, by tokenizing on the
+// given filter function. Caller can control which codepoint to split and
+// whether a delimiter should be output as a token.
+template <typename FilterFn>
+std::vector<Token> TokenizeWithFilter(const absl::string_view input,
+                                      FilterFn filter) {
+  const UnicodeText input_unicode = UTF8ToUnicodeText(input, /*do_copy=*/false);
+  std::vector<Token> tokens;
+  UnicodeText::const_iterator start_it = input_unicode.begin();
+  int token_start_codepoint = 0;
+  int codepoint_idx = 0;
+
+  for (auto it = input_unicode.begin(); it != input_unicode.end(); ++it) {
+    const char32 code_point = *it;
+    FilterResult filter_result = filter(code_point);
+    if (filter_result.to_split) {
+      const std::string token_text = UnicodeText::UTF8Substring(start_it, it);
+      if (!token_text.empty()) {
+        tokens.push_back(
+            Token{token_text, token_start_codepoint, codepoint_idx});
+      }
+      if (filter_result.to_keep) {
+        const std::string delimiter =
+            UnicodeText::UTF8Substring(it, std::next(it));
+        tokens.push_back(Token{delimiter, codepoint_idx, codepoint_idx + 1});
+      }
+      start_it = std::next(it);
+      token_start_codepoint = codepoint_idx + 1;
+    }
+    codepoint_idx++;
+  }
+  // Flush the last token if any.
+  if (start_it != input_unicode.end()) {
+    const std::string token_text =
+        UnicodeText::UTF8Substring(start_it, input_unicode.end());
+    tokens.push_back(Token{token_text, token_start_codepoint, codepoint_idx});
+  }
+  return tokens;
+}
+
 }  // namespace  libtextclassifier3
 
 #endif  // LIBTEXTCLASSIFIER_UTILS_TOKENIZER_UTILS_H_
diff --git a/native/utils/tokenizer-utils_test.cc b/native/utils/tokenizer-utils_test.cc
index 9c632bd..d4a1bc0 100644
--- a/native/utils/tokenizer-utils_test.cc
+++ b/native/utils/tokenizer-utils_test.cc
@@ -140,5 +140,62 @@
   EXPECT_EQ(tokens[8].end, 36);
 }
 
+TEST(TokenizerUtilTest, SimpleEnglishWithPunctuation) {
+  absl::string_view input = "I am fine, thanks!";
+
+  std::vector<Token> tokens =
+      TokenizeOnWhiteSpacePunctuationAndChineseLetter(input);
+
+  EXPECT_THAT(tokens, testing::ElementsAreArray(
+                          {Token{"I", 0, 1}, Token{"am", 2, 4},
+                           Token{"fine", 5, 9}, Token{",", 9, 10},
+                           Token{"thanks", 11, 17}, Token{"!", 17, 18}}));
+}
+
+TEST(TokenizerUtilTest, InputDoesNotEndWithDelimiter) {
+  absl::string_view input = "Good! Cool";
+
+  std::vector<Token> tokens =
+      TokenizeOnWhiteSpacePunctuationAndChineseLetter(input);
+
+  EXPECT_THAT(tokens,
+              testing::ElementsAreArray({Token{"Good", 0, 4}, Token{"!", 4, 5},
+                                         Token{"Cool", 6, 10}}));
+}
+
+TEST(TokenizerUtilTest, OnlySpace) {
+  absl::string_view input = "  \t";
+
+  std::vector<Token> tokens =
+      TokenizeOnWhiteSpacePunctuationAndChineseLetter(input);
+
+  ASSERT_TRUE(tokens.empty());
+}
+
+TEST(TokenizerUtilTest, Punctuation) {
+  absl::string_view input = "!-/:-@[-`{-~";
+
+  std::vector<Token> tokens =
+      TokenizeOnWhiteSpacePunctuationAndChineseLetter(input);
+
+  EXPECT_THAT(tokens,
+              testing::ElementsAreArray(
+                  {Token{"!", 0, 1}, Token{"-", 1, 2}, Token{"/", 2, 3},
+                   Token{":", 3, 4}, Token{"-", 4, 5}, Token{"@", 5, 6},
+                   Token{"[", 6, 7}, Token{"-", 7, 8}, Token{"`", 8, 9},
+                   Token{"{", 9, 10}, Token{"-", 10, 11}, Token{"~", 11, 12}}));
+}
+
+TEST(TokenizerUtilTest, ChineseCharacters) {
+  absl::string_view input = "你好嗎三個字";
+
+  std::vector<Token> tokens =
+      TokenizeOnWhiteSpacePunctuationAndChineseLetter(input);
+
+  EXPECT_THAT(tokens,
+              testing::ElementsAreArray(
+                  {Token{"你", 0, 1}, Token{"好", 1, 2}, Token{"嗎", 2, 3},
+                   Token{"三", 3, 4}, Token{"個", 4, 5}, Token{"字", 5, 6}}));
+}
 }  // namespace
 }  // namespace libtextclassifier3
diff --git a/native/utils/tokenizer.fbs b/native/utils/tokenizer.fbs
old mode 100755
new mode 100644
diff --git a/native/utils/utf8/unicodetext.cc b/native/utils/utf8/unicodetext.cc
index d05e377..a8bc9fb 100644
--- a/native/utils/utf8/unicodetext.cc
+++ b/native/utils/utf8/unicodetext.cc
@@ -22,6 +22,7 @@
 
 #include "utils/base/logging.h"
 #include "utils/strings/utf8.h"
+#include "absl/strings/string_view.h"
 
 namespace libtextclassifier3 {
 
@@ -336,4 +337,8 @@
   return UTF8ToUnicodeText(str.data(), str.size(), do_copy);
 }
 
+UnicodeText UTF8ToUnicodeText(absl::string_view str, bool do_copy) {
+  return UTF8ToUnicodeText(str.data(), str.size(), do_copy);
+}
+
 }  // namespace libtextclassifier3
diff --git a/native/utils/utf8/unicodetext.h b/native/utils/utf8/unicodetext.h
index 4c1c3ce..1eb41bc 100644
--- a/native/utils/utf8/unicodetext.h
+++ b/native/utils/utf8/unicodetext.h
@@ -25,6 +25,7 @@
 #include "utils/base/integral_types.h"
 #include "utils/base/logging.h"
 #include "utils/strings/stringpiece.h"
+#include "absl/strings/string_view.h"
 
 namespace libtextclassifier3 {
 
@@ -237,6 +238,7 @@
 UnicodeText UTF8ToUnicodeText(const char* utf8_buf, bool do_copy = true);
 UnicodeText UTF8ToUnicodeText(const std::string& str, bool do_copy = true);
 UnicodeText UTF8ToUnicodeText(StringPiece str, bool do_copy = true);
+UnicodeText UTF8ToUnicodeText(absl::string_view str, bool do_copy = true);
 
 inline logging::LoggingStringStream& operator<<(
     logging::LoggingStringStream& stream, const UnicodeText& message) {
diff --git a/native/utils/variant.cc b/native/utils/variant.cc
index 0513440..ed39826 100644
--- a/native/utils/variant.cc
+++ b/native/utils/variant.cc
@@ -30,6 +30,9 @@
     case Variant::TYPE_INT_VALUE:
       return std::to_string(Value<int>());
       break;
+    case Variant::TYPE_UINT_VALUE:
+      return std::to_string(Value<unsigned int>());
+      break;
     case Variant::TYPE_INT64_VALUE:
       return std::to_string(Value<int64>());
       break;
diff --git a/native/utils/wordpiece_tokenizer.cc b/native/utils/wordpiece_tokenizer.cc
new file mode 100644
index 0000000..f4fcafc
--- /dev/null
+++ b/native/utils/wordpiece_tokenizer.cc
@@ -0,0 +1,247 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "utils/wordpiece_tokenizer.h"
+
+#include "utils/utf8/unicodetext.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/str_join.h"
+#include "absl/strings/string_view.h"
+
+namespace libtextclassifier3 {
+
+namespace {
+
+LookupStatus Lookup(int byte_start, int byte_end, const absl::string_view token,
+                    const std::string& suffix_indicator,
+                    const WordpieceVocab* vocab_map, bool* in_vocab) {
+  int byte_len = byte_end - byte_start;
+  absl::string_view substr(token.data() + byte_start, byte_len);
+  std::string lookup_value;
+  if (byte_start > 0) {
+    lookup_value = absl::StrCat(suffix_indicator, substr);
+  } else {
+    // Equivalent to absl::CopyToString(substr, &lookup_value).
+    lookup_value.assign(substr.begin(), substr.end());
+  }
+  return vocab_map->Contains(lookup_value, in_vocab);
+}
+
+// Sets byte_end to the longest byte sequence which:
+// 1) is a proper UTF8 sequence
+// 2) is in the vocab OR if split_unknown_characters is true, is a single
+//    UTF8 character.
+// If no match is found, found_match is set to false.
+LookupStatus LongestMatchStartingAt(
+    int byte_start, const absl::string_view token,
+    const std::string& suffix_indicator, const int max_chars_per_subtoken,
+    bool split_unknown_characters, const WordpieceVocab* vocab_map,
+    int* byte_end, bool* found_match, bool* match_is_unknown_character) {
+  *match_is_unknown_character = false;
+  *found_match = false;
+  const UnicodeText unicode_token =
+      UTF8ToUnicodeText(token.substr(byte_start), /*do_copy=*/false);
+  std::vector<int32_t> byte_ends;
+  int32_t codepoint_offset = byte_start;
+  for (auto it = unicode_token.begin(); it != unicode_token.end(); ++it) {
+    codepoint_offset += it.utf8_length();
+    byte_ends.push_back(codepoint_offset);
+    if (max_chars_per_subtoken > 0 &&
+        byte_ends.size() == max_chars_per_subtoken) {
+      // If the max characters of a subtoken is known, do not search beyond
+      // that length.
+      break;
+    }
+  }
+  int n = byte_ends.size();
+  for (int i = n - 1; i >= 0; i--) {
+    bool in_vocab;
+    auto status = Lookup(byte_start, byte_ends[i], token, suffix_indicator,
+                         vocab_map, &in_vocab);
+    if (!status.success) return status;
+    if (in_vocab) {
+      *byte_end = byte_ends[i];
+      *found_match = true;
+      return LookupStatus::OK();
+    }
+    if (i == 0 && split_unknown_characters) {
+      *byte_end = byte_ends[0];
+      *found_match = true;
+      *match_is_unknown_character = true;
+      return LookupStatus::OK();
+    }
+  }
+  return LookupStatus::OK();
+}
+
+// Sets the outputs 'subwords', 'begin_offset', 'end_offset' and
+// 'num_word_pieces' when no token is found.
+LookupStatus NoTokenFound(const absl::string_view token, bool use_unknown_token,
+                          const std::string& unknown_token,
+                          std::vector<std::string>* subwords,
+                          std::vector<int>* begin_offset,
+                          std::vector<int>* end_offset, int* num_word_pieces) {
+  begin_offset->push_back(0);
+  if (use_unknown_token) {
+    subwords->push_back(unknown_token);
+    end_offset->push_back(token.length());
+  } else {
+    subwords->emplace_back(token.data(), token.length());
+    end_offset->push_back(token.length());
+  }
+  ++(*num_word_pieces);
+
+  return LookupStatus::OK();
+}
+
+// When a subword is found, this helper function will add the outputs to
+// 'subwords', 'begin_offset' and 'end_offset'.
+void AddWord(const absl::string_view token, int byte_start, int byte_end,
+             const std::string& suffix_indicator,
+             std::vector<std::string>* subwords, std::vector<int>* begin_offset,
+             std::vector<int>* end_offset) {
+  begin_offset->push_back(byte_start);
+  int len = byte_end - byte_start;
+
+  if (byte_start > 0) {
+    // Prepend suffix_indicator if the token is within a word.
+    subwords->push_back(::absl::StrCat(
+        suffix_indicator, absl::string_view(token.data() + byte_start, len)));
+  } else {
+    subwords->emplace_back(token.data(), len);
+  }
+  end_offset->push_back(byte_end);
+}
+
+// Adds a single unknown character subword, found when split_unknown_characters
+// is true.
+void AddUnknownCharacter(const absl::string_view token, int byte_start,
+                         int byte_end, const std::string& suffix_indicator,
+                         bool use_unknown_token,
+                         const std::string& unknown_token,
+                         std::vector<std::string>* subwords,
+                         std::vector<int>* begin_offset,
+                         std::vector<int>* end_offset) {
+  begin_offset->push_back(byte_start);
+  end_offset->push_back(byte_end);
+  int len = byte_end - byte_start;
+  if (use_unknown_token) {
+    if (byte_start > 0) {
+      // Prepend suffix_indicator if the character is within a word.
+      subwords->push_back(::absl::StrCat(suffix_indicator, unknown_token));
+    } else {
+      subwords->push_back(unknown_token);
+    }
+  } else {
+    if (byte_start > 0) {
+      // Prepend suffix_indicator if the character is within a word.
+      subwords->push_back(::absl::StrCat(
+          suffix_indicator, absl::string_view(token.data() + byte_start, len)));
+    } else {
+      subwords->emplace_back(token.data(), len);
+    }
+  }
+}
+
+LookupStatus TokenizeL2RGreedy(
+    const absl::string_view token, const int max_bytes_per_token,
+    const int max_chars_per_subtoken, const std::string& suffix_indicator,
+    bool use_unknown_token, const std::string& unknown_token,
+    bool split_unknown_characters, const WordpieceVocab* vocab_map,
+    std::vector<std::string>* subwords, std::vector<int>* begin_offset,
+    std::vector<int>* end_offset, int* num_word_pieces) {
+  std::vector<std::string> candidate_subwords;
+  std::vector<int> candidate_begin_offsets;
+  std::vector<int> candidate_end_offsets;
+  const int token_len = token.length();
+  for (int byte_start = 0; byte_start < token_len;) {
+    int byte_end;
+    bool found_subword;
+    bool match_is_unknown_character;
+    auto status = LongestMatchStartingAt(
+        byte_start, token, suffix_indicator, max_chars_per_subtoken,
+        split_unknown_characters, vocab_map, &byte_end, &found_subword,
+        &match_is_unknown_character);
+    if (!status.success) return status;
+    if (found_subword) {
+      if (match_is_unknown_character) {
+        AddUnknownCharacter(token, byte_start, byte_end, suffix_indicator,
+                            use_unknown_token, unknown_token,
+                            &candidate_subwords, &candidate_begin_offsets,
+                            &candidate_end_offsets);
+      } else {
+        AddWord(token, byte_start, byte_end, suffix_indicator,
+                &candidate_subwords, &candidate_begin_offsets,
+                &candidate_end_offsets);
+      }
+      byte_start = byte_end;
+    } else {
+      return NoTokenFound(token, use_unknown_token, unknown_token, subwords,
+                          begin_offset, end_offset, num_word_pieces);
+    }
+  }
+
+  subwords->insert(subwords->end(), candidate_subwords.begin(),
+                   candidate_subwords.end());
+  begin_offset->insert(begin_offset->end(), candidate_begin_offsets.begin(),
+                       candidate_begin_offsets.end());
+  end_offset->insert(end_offset->end(), candidate_end_offsets.begin(),
+                     candidate_end_offsets.end());
+  *num_word_pieces += candidate_subwords.size();
+  return LookupStatus::OK();
+}
+
+}  // namespace
+
+LookupStatus WordpieceTokenize(
+    const absl::string_view token, const int max_bytes_per_token,
+    const int max_chars_per_subtoken, const std::string& suffix_indicator,
+    bool use_unknown_token, const std::string& unknown_token,
+    bool split_unknown_characters, const WordpieceVocab* vocab_map,
+    std::vector<std::string>* subwords, std::vector<int>* begin_offset,
+    std::vector<int>* end_offset, int* num_word_pieces) {
+  int token_len = token.size();
+  if (token_len > max_bytes_per_token) {
+    begin_offset->push_back(0);
+    *num_word_pieces = 1;
+    if (use_unknown_token) {
+      end_offset->push_back(unknown_token.size());
+      subwords->emplace_back(unknown_token);
+    } else {
+      subwords->emplace_back(token);
+      end_offset->push_back(token.size());
+    }
+    return LookupStatus::OK();
+  }
+  return TokenizeL2RGreedy(token, max_bytes_per_token, max_chars_per_subtoken,
+                           suffix_indicator, use_unknown_token, unknown_token,
+                           split_unknown_characters, vocab_map, subwords,
+                           begin_offset, end_offset, num_word_pieces);
+}
+
+LookupStatus WordpieceTokenize(
+    const absl::string_view token, const int max_bytes_per_token,
+    const std::string& suffix_indicator, bool use_unknown_token,
+    const std::string& unknown_token, const WordpieceVocab* vocab_map,
+    std::vector<std::string>* subwords, std::vector<int>* begin_offset,
+    std::vector<int>* end_offset, int* num_word_pieces) {
+  return WordpieceTokenize(token, max_bytes_per_token,
+                           /* max_chars_per_subtoken= */ 0, suffix_indicator,
+                           use_unknown_token, unknown_token,
+                           /* split_unknown_characters= */ false, vocab_map,
+                           subwords, begin_offset, end_offset, num_word_pieces);
+}
+}  // namespace libtextclassifier3
diff --git a/native/utils/wordpiece_tokenizer.h b/native/utils/wordpiece_tokenizer.h
new file mode 100644
index 0000000..a6eb8e0
--- /dev/null
+++ b/native/utils/wordpiece_tokenizer.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBTEXTCLASSIFIER_UTILS_WORDPIECE_TOKENIZER_H_
+#define LIBTEXTCLASSIFIER_UTILS_WORDPIECE_TOKENIZER_H_
+
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+
+namespace libtextclassifier3 {
+
+struct LookupStatus {
+  LookupStatus() : error_msg(""), success(true) {}
+  explicit LookupStatus(const std::string& msg)
+      : error_msg(msg), success(false) {}
+  std::string error_msg;
+  bool success;
+
+  static LookupStatus OK() { return LookupStatus(); }
+};
+
+class WordpieceVocab {
+ public:
+  virtual ~WordpieceVocab() {}
+  virtual LookupStatus Contains(const absl::string_view key,
+                                bool* value) const = 0;
+};
+
+LookupStatus WordpieceTokenize(
+    const absl::string_view token, const int max_bytes_per_token,
+    const int max_chars_per_subtoken, const std::string& suffix_indicator,
+    bool use_unknown_token, const std::string& unknown_token,
+    bool split_unknown_characters, const WordpieceVocab* vocab_map,
+    std::vector<std::string>* subwords, std::vector<int>* begin_offset,
+    std::vector<int>* end_offset, int* num_word_pieces);
+
+// As above but with `max_chars_per_subtoken` set to 0 (no per-subtoken limit)
+// and split_unknown_characters=false. (For backwards compatibility.)
+LookupStatus WordpieceTokenize(
+    const absl::string_view token, const int max_bytes_per_token,
+    const std::string& suffix_indicator, bool use_unknown_token,
+    const std::string& unknown_token, const WordpieceVocab* vocab_map,
+    std::vector<std::string>* subwords, std::vector<int>* begin_offset,
+    std::vector<int>* end_offset, int* num_word_pieces);
+
+}  // namespace libtextclassifier3
+
+#endif  // LIBTEXTCLASSIFIER_UTILS_WORDPIECE_TOKENIZER_H_
diff --git a/native/utils/zlib/buffer.fbs b/native/utils/zlib/buffer.fbs
old mode 100755
new mode 100644
diff --git a/notification/Android.bp b/notification/Android.bp
index 277985b..782d5cb 100644
--- a/notification/Android.bp
+++ b/notification/Android.bp
@@ -28,7 +28,7 @@
     name: "TextClassifierNotificationLib",
     static_libs: ["TextClassifierNotificationLibNoManifest"],
     sdk_version: "system_current",
-    min_sdk_version: "29",
+    min_sdk_version: "30",
     manifest: "AndroidManifest.xml",
 }
 
@@ -41,6 +41,6 @@
         "guava",
     ],
     sdk_version: "system_current",
-    min_sdk_version: "29",
+    min_sdk_version: "30",
     manifest: "LibNoManifest_AndroidManifest.xml",
 }
diff --git a/notification/AndroidManifest.xml b/notification/AndroidManifest.xml
index 3153d1d..5a98ea3 100644
--- a/notification/AndroidManifest.xml
+++ b/notification/AndroidManifest.xml
@@ -1,7 +1,7 @@
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
     package="com.android.textclassifier.notification">
 
-  <uses-sdk android:minSdkVersion="29" />
+  <uses-sdk android:minSdkVersion="30" />
 
   <application>
     <activity
@@ -10,4 +10,4 @@
         android:theme="@android:style/Theme.NoDisplay" />
   </application>
 
-</manifest>
\ No newline at end of file
+</manifest>
diff --git a/notification/LibNoManifest_AndroidManifest.xml b/notification/LibNoManifest_AndroidManifest.xml
index b9ebf7d..06e8da4 100644
--- a/notification/LibNoManifest_AndroidManifest.xml
+++ b/notification/LibNoManifest_AndroidManifest.xml
@@ -25,6 +25,6 @@
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
           package="com.android.textclassifier.notification">
 
-    <uses-sdk android:minSdkVersion="29" android:targetSdkVersion="29"/>
+    <uses-sdk android:minSdkVersion="30" android:targetSdkVersion="30"/>
 
 </manifest>
diff --git a/notification/tests/Android.bp b/notification/tests/Android.bp
index cafd8a3..48c6324 100644
--- a/notification/tests/Android.bp
+++ b/notification/tests/Android.bp
@@ -42,7 +42,7 @@
     ],
 
     test_suites: [
-        "device-tests", "mts-extservices"
+        "general-tests", "mts-extservices"
     ],
 
     min_sdk_version: "30",
diff --git a/notification/tests/AndroidManifest.xml b/notification/tests/AndroidManifest.xml
index 81308e3..d3da067 100644
--- a/notification/tests/AndroidManifest.xml
+++ b/notification/tests/AndroidManifest.xml
@@ -2,8 +2,8 @@
     package="com.android.textclassifier.notification">
 
   <uses-sdk
-      android:minSdkVersion="29"
-      android:targetSdkVersion="29" />
+      android:minSdkVersion="30"
+      android:targetSdkVersion="30" />
 
   <application>
     <uses-library android:name="android.test.runner"/>
diff --git a/notification/tests/AndroidTest.xml b/notification/tests/AndroidTest.xml
index 1890e75..0f60d10 100644
--- a/notification/tests/AndroidTest.xml
+++ b/notification/tests/AndroidTest.xml
@@ -13,8 +13,8 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<!-- This test config file is auto-generated. -->
 <configuration description="Runs TextClassifierNotificationTests.">
+    <option name="config-descriptor:metadata" key="mainline-param" value="com.google.android.extservices.apex" />
     <option name="test-suite-tag" value="apct" />
     <option name="test-suite-tag" value="apct-instrumentation" />
     <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
diff --git a/proto/Android.bp b/proto/Android.bp
deleted file mode 100644
index c1c2825..0000000
--- a/proto/Android.bp
+++ /dev/null
@@ -1,31 +0,0 @@
-//
-// Copyright (C) 2019 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-java_library {
-    name: "textclassifier-java-proto-lite",
-    proto: {
-        type: "lite",
-    },
-    srcs: ["*.proto"],
-    sdk_version: "current",
-    min_sdk_version: "30",
-    jarjar_rules: "jarjar-rules.txt",
-    apex_available: [
-        "//apex_available:platform",
-        "com.android.extservices",
-    ],
-}
-
diff --git a/proto/jarjar-rules.txt b/proto/jarjar-rules.txt
deleted file mode 100644
index 82a5b76..0000000
--- a/proto/jarjar-rules.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-# Use our statically linked protobuf library
-rule com.google.protobuf.** com.android.textclassifier.protobuf.@1
diff --git a/proto/model_manifest.proto b/proto/model_manifest.proto
deleted file mode 100644
index 5b969a7..0000000
--- a/proto/model_manifest.proto
+++ /dev/null
@@ -1,46 +0,0 @@
-//
-// Copyright (C) 2018 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package textclassifier.tcs;
-
-option java_package = "com.android.textclassifier";
-option java_multiple_files = true;
-
-// Manifest for a download task.
-message ModelManifest {
-  message Model {
-    string url = 1;
-    int64 size_in_bytes = 2;
-    string fingerprint = 3;
-  }
-  // All models to download in this task. Currently the size is always one.
-  repeated Model models = 1;
-  // Consistent with androidx.work.NetworkType.
-  enum NetworkType {
-    UNMETERED = 0;
-    METERED = 1;
-    NOT_REQUIRED = 2;
-    NOT_ROAMING = 3;
-    CONNECTED = 4;
-  }
-  NetworkType required_network_type = 2;
-  bool requires_battery_not_low = 3;
-  bool requires_charging = 4;
-  bool requires_device_idle = 5;
-  bool requires_storage_not_low = 6;
-}