Add q8 model

Aleksandras Kostarevas 2023-10-16 17:31:43 +03:00
parent ec7cae6bbf
commit c73fe16ddc
6 changed files with 6 additions and 7 deletions

.gitignore vendored

@@ -6,4 +6,5 @@ LatinIME.iml
 build/
 local.properties
 crashreporting.properties
 keystore.properties
+key.jks


@@ -1,2 +0,0 @@
-*.gguf
-*tokenizer.model

java/res/raw/ml3_q8.gguf Normal file

Binary file not shown.



@@ -83,16 +83,16 @@ public class LanguageModel extends Dictionary {
         @Override public void run() {
             if(mNativeState != 0) return;
 
-            String modelPath = getPathToModelResource(context, R.raw.ml3, R.raw.ml3_tokenizer, true);
+            String modelPath = getPathToModelResource(context, R.raw.ml3_q8, R.raw.ml3_tokenizer, false);
             mNativeState = openNative(modelPath);
 
             if(mNativeState == 0){
-                modelPath = getPathToModelResource(context, R.raw.ml3, R.raw.ml3_tokenizer, true);
+                modelPath = getPathToModelResource(context, R.raw.ml3_q8, R.raw.ml3_tokenizer, true);
                 mNativeState = openNative(modelPath);
             }
 
             if(mNativeState == 0){
-                throw new RuntimeException("Failed to load R.raw.ml3, R.raw.ml3_tokenizer model");
+                throw new RuntimeException("Failed to load R.raw.ml3_q8, R.raw.ml3_tokenizer model");
             }
         }
     };
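For reference, the change above keeps a two-step load: try the already-extracted q8 model first, and force a fresh extraction only if the first openNative() call fails. Below is a minimal sketch of that pattern as a standalone helper, not code from the repository: the name loadQ8Model is hypothetical, the long handle type mirrors how mNativeState is used, and reading the final boolean of getPathToModelResource() as a "force re-extraction" flag is an assumption the diff does not state.

    // Sketch of the load-with-fallback pattern from the diff above.
    // Assumed to live inside LanguageModel, next to the existing
    // getPathToModelResource() and openNative() members.
    private long loadQ8Model(Context context) {
        // First attempt: reuse a previously extracted copy of the q8 model
        // (final argument false is assumed to mean "do not re-extract").
        String modelPath = getPathToModelResource(context, R.raw.ml3_q8, R.raw.ml3_tokenizer, false);
        long state = openNative(modelPath);

        if (state == 0) {
            // Retry once, forcing extraction in case the cached copy is
            // missing, stale, or corrupt.
            modelPath = getPathToModelResource(context, R.raw.ml3_q8, R.raw.ml3_tokenizer, true);
            state = openNative(modelPath);
        }

        if (state == 0) {
            throw new RuntimeException("Failed to load R.raw.ml3_q8, R.raw.ml3_tokenizer model");
        }
        return state;
    }

The cheap-first, forced-extraction-second ordering avoids rewriting the model file on every start while still recovering if the previously extracted copy is unusable.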


@@ -17,7 +17,7 @@ LOCAL_PATH := $(call my-dir)
 ############ some local flags
 # If you change any of those flags, you need to rebuild both libjni_latinime_common_static
 # and the shared library that uses libjni_latinime_common_static.
-FLAG_DBG ?= true
+FLAG_DBG ?= false
 FLAG_DO_PROFILE ?= false
 
 ######################################