Running birds tensorflowlite example via JNI - module name conflict
GarethEgerton opened this issue · comments
Summary
I am trying to implement the birds example from here https://wasmedge.org/docs/contribute/source/os/android/ndk/ using the JNI. My JNI function is being called via NativeLib in Kotlin. The following code compiles without errors and creates an apk containing the following in /lib:
libtensorflowlite_c.so
libwasmedge.so
libwasmedge-image_c.so
libwasmedge-tensorflowlite_c.so
libwasm_inference.so
Here is the first part of my wasm_inference CMakeLists.txt which uses WasmEdge-tensorflow-tools with the following settings:
cmake_minimum_required(VERSION 3.22.1)
project("wasm_inference")
set(CMAKE_VERBOSE_MAKEFILE ON)
# Path to the WasmEdge-tensorflow-tools checkout relative to this module.
set(WASMEDGE_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../../../../WasmEdge-tensorflow-tools)
# Build only the shared runtime library; no plugins, tools, or AOT compiler.
set(WASMEDGE_BUILD_PLUGINS OFF CACHE BOOL "" FORCE)
set(WASMEDGE_BUILD_TOOLS OFF CACHE BOOL "" FORCE)
set(WASMEDGE_BUILD_SHARED_LIB ON CACHE BOOL "" FORCE)
# FIX: the original line had a stray trailing "s" after the closing paren,
# which is a CMake syntax error.
set(WASMEDGE_USE_LLVM OFF CACHE BOOL "" FORCE)
set(WASMEDGE_FORCE_DISABLE_LTO ON CACHE BOOL "" FORCE)
set(WASMEDGE_IMAGE_BUILD_SHARED_LIB ON CACHE BOOL "Generate the libwasmedge-image_c shared library." FORCE)
# FIX: description was a copy-paste of the image library's docstring.
set(WASMEDGE_TENSORFLOW_BUILD_SHARED_LIB ON CACHE BOOL "Generate the libwasmedge-tensorflowlite_c shared library." FORCE)
When I run the app in debug mode and step through the JNI function as follows, I get "module name conflict" WasmEdge_ErrCategory_WASM for these three lines:
/* FIX: the excerpt listed TFLiteImpObj twice; the actual code below
 * registers ImageImpObj, TFLiteImpObj, then TFDummyImpObj. */
WasmEdge_VMRegisterModuleFromImport(VMCxt, ImageImpObj);
WasmEdge_VMRegisterModuleFromImport(VMCxt, TFLiteImpObj);
WasmEdge_VMRegisterModuleFromImport(VMCxt, TFDummyImpObj);
Here is the full code for my JNI function, which is able to compile and run, albeit with the above errors:
#include <jni.h>
#include <string>
#include <vector>
#include <iostream>
#include <fstream>
#include <cstdio>
#include <cstdint>
#include <wasmedge/wasmedge.h>
#include "wasmedge-image.h"
#include <wasmedge/wasmedge-tensorflowlite.h>
extern "C"
JNIEXPORT jint JNICALL
Java_com_example_android_octaidroid_NativeLib_nativeWasmInference(
    JNIEnv *env, jobject,
    jstring wasm_file,
    jstring model_path,
    jstring image_path) {
  // Copy the JNI strings into owned std::strings and release the JNI
  // buffers immediately.  FIX: the original never called
  // ReleaseStringUTFChars, leaking all three buffers on every call.
  const char *wasmFileChars = env->GetStringUTFChars(wasm_file, nullptr);
  std::string wasmFilePath = wasmFileChars ? wasmFileChars : "";
  env->ReleaseStringUTFChars(wasm_file, wasmFileChars);
  const char *modelChars = env->GetStringUTFChars(model_path, nullptr);
  std::string wasmModelPath = modelChars ? modelChars : "";
  env->ReleaseStringUTFChars(model_path, modelChars);
  const char *imageChars = env->GetStringUTFChars(image_path, nullptr);
  std::string wasmImagePath = imageChars ? imageChars : "";
  env->ReleaseStringUTFChars(image_path, imageChars);

  WasmEdge_Result Res;
  /* Create the VM context with WASI host registration enabled. */
  WasmEdge_ConfigureContext *ConfCxt = WasmEdge_ConfigureCreate();
  WasmEdge_ConfigureAddHostRegistration(ConfCxt, WasmEdge_HostRegistration_Wasi);
  WasmEdge_VMContext *VMCxt = WasmEdge_VMCreate(ConfCxt, nullptr);
  WasmEdge_ConfigureDelete(ConfCxt);

  /* Create the image and TFLite host modules and register them into the VM. */
  WasmEdge_ModuleInstanceContext *ImageImpObj = WasmEdge_Image_ModuleInstanceCreate();
  WasmEdge_ModuleInstanceContext *TFLiteImpObj = WasmEdge_TensorflowLite_ModuleInstanceCreate();
  WasmEdge_ModuleInstanceContext *TFDummyImpObj = WasmEdge_Tensorflow_ModuleInstanceCreateDummy();
  Res = WasmEdge_VMRegisterModuleFromImport(VMCxt, ImageImpObj);
  handle_WasmEdge_result(Res);
  Res = WasmEdge_VMRegisterModuleFromImport(VMCxt, TFLiteImpObj);
  handle_WasmEdge_result(Res);
  Res = WasmEdge_VMRegisterModuleFromImport(VMCxt, TFDummyImpObj);
  handle_WasmEdge_result(Res);

  /* Init WASI: argv = {wasm, model, image}, preopen "." for the guest. */
  const char *Preopens[] = {".:."};
  const char *Args[] = {wasmFilePath.c_str(), wasmModelPath.c_str(), wasmImagePath.c_str()};
  WasmEdge_ModuleInstanceContext *WASIImpObj =
      WasmEdge_VMGetImportModuleContext(VMCxt, WasmEdge_HostRegistration_Wasi);
  WasmEdge_ModuleInstanceInitWASI(WASIImpObj, Args, 3, nullptr, 0, Preopens, 1);

  /* Load and run the module's _start entry point. */
  WasmEdge_String FuncName = WasmEdge_StringCreateByCString("_start");
  Res = WasmEdge_VMRunWasmFromFile(VMCxt, wasmFilePath.c_str(), FuncName, nullptr, 0, nullptr, 0);
  WasmEdge_StringDelete(FuncName);

  jint Rc = 0;
  if (!WasmEdge_ResultOK(Res)) {
    printf("Run WASM failed: %s\n", WasmEdge_ResultGetMessage(Res));
    // FIX: original returned -1 here, leaking the three module instances
    // and the VM context; fall through to the single cleanup path instead.
    Rc = -1;
  }
  /* Single cleanup path for every exit. */
  WasmEdge_ModuleInstanceDelete(ImageImpObj);
  WasmEdge_ModuleInstanceDelete(TFLiteImpObj);
  WasmEdge_ModuleInstanceDelete(TFDummyImpObj);
  WasmEdge_VMDelete(VMCxt);
  return Rc;
}
Please advise — it would be much appreciated if you could guide me as to what could be causing these conflict errors and how I can resolve them.
Many thanks
Appendix
No response
Hi @GarethEgerton
The module name conflict is a false positive error. It should be fixed in the master branch, and it will be gone in the next release.
Thanks for the info @hydai. I will ignore the name conflict errors in this case if they are false positives.
I have a bit more detailed example as follows, I am still getting an unknown import
error that I am trying to resolve:
extern "C"
JNIEXPORT jint JNICALL
Java_com_example_android_octaidroid_NativeLib_nativeWasmInference(
    JNIEnv *env, jobject,
    jstring wasm_file,
    jstring model_path,
    jstring image_path) {
  // 1. Set up the VM with WASI host registration.
  WasmEdge_ConfigureContext *ConfCxt = WasmEdge_ConfigureCreate();
  WasmEdge_ConfigureAddHostRegistration(ConfCxt, WasmEdge_HostRegistration_Wasi);
  WasmEdge_VMContext *VMCxt = WasmEdge_VMCreate(ConfCxt, nullptr);
  WasmEdge_ConfigureDelete(ConfCxt);  // FIX: was never deleted (leak)

  /* Create and register the image / TFLite host modules. */
  WasmEdge_ModuleInstanceContext *ImageImpObj = WasmEdge_Image_ModuleInstanceCreate();
  WasmEdge_ModuleInstanceContext *TFLiteImpObj = WasmEdge_TensorflowLite_ModuleInstanceCreate();
  WasmEdge_ModuleInstanceContext *TFDummyImpObj = WasmEdge_Tensorflow_ModuleInstanceCreateDummy();
  WasmEdge_Result Res;
  Res = WasmEdge_VMRegisterModuleFromImport(VMCxt, ImageImpObj);
  handle_WasmEdge_result(Res);
  Res = WasmEdge_VMRegisterModuleFromImport(VMCxt, TFLiteImpObj);
  handle_WasmEdge_result(Res);
  Res = WasmEdge_VMRegisterModuleFromImport(VMCxt, TFDummyImpObj);
  handle_WasmEdge_result(Res);

  /* Load / validate / instantiate the guest module.
   * FIX: copy the JNI string and release it immediately — the original
   * never called ReleaseStringUTFChars on any of the three paths. */
  const char *wasmFileChars = env->GetStringUTFChars(wasm_file, nullptr);
  std::string wasmFilePath = wasmFileChars ? wasmFileChars : "";
  env->ReleaseStringUTFChars(wasm_file, wasmFileChars);
  Res = WasmEdge_VMLoadWasmFromFile(VMCxt, wasmFilePath.c_str());
  handle_WasmEdge_result(Res);
  Res = WasmEdge_VMValidate(VMCxt);
  handle_WasmEdge_result(Res);
  Res = WasmEdge_VMInstantiate(VMCxt);
  handle_WasmEdge_result(Res);

  // 2. Locate the guest module's exported linear memory.
  //    FIX: the original also created a host MemoryInstance (HostMemory)
  //    that was never used and never deleted, and built a second, leaked
  //    WasmEdge_String("memory") inline while `memoryString` went unused.
  const WasmEdge_ModuleInstanceContext *ModuleInst = WasmEdge_VMGetActiveModule(VMCxt);
  WasmEdge_String MemName = WasmEdge_StringCreateByCString("memory");
  WasmEdge_MemoryInstanceContext *MemInst =
      WasmEdge_ModuleInstanceFindMemory(ModuleInst, MemName);
  WasmEdge_StringDelete(MemName);

  // 3. Pass the model and image bytes into the wasm module's memory.
  const char *modelChars = env->GetStringUTFChars(model_path, nullptr);
  std::string modelPath = modelChars ? modelChars : "";
  env->ReleaseStringUTFChars(model_path, modelChars);
  std::vector<uint8_t> modelFileBytes = get_file_bytes(modelPath);
  const char *imageChars = env->GetStringUTFChars(image_path, nullptr);
  std::string imagePath = imageChars ? imageChars : "";
  env->ReleaseStringUTFChars(image_path, imageChars);
  std::vector<uint8_t> imageFileBytes = get_file_bytes(imagePath);
  uint64_t modelBytesLocation = allocate_and_set_data(VMCxt, MemInst, modelFileBytes);
  uint64_t imageBytesLocation = allocate_and_set_data(VMCxt, MemInst, imageFileBytes);

  // 4. Run the guest inference function and copy its output back out.
  WasmEdge_Value funcArgs[2] = {
      WasmEdge_ValueGenI64(modelBytesLocation),
      WasmEdge_ValueGenI64(imageBytesLocation)};
  uint64_t outputLocation = run_function(VMCxt, "run_inference", funcArgs, 2);
  std::vector<uint8_t> outputData = get_data(VMCxt, MemInst, outputLocation);
  std::string dataString(reinterpret_cast<char *>(outputData.data()), outputData.size());

  // 5. Ask the guest to free its output buffer(s), then tear everything down.
  std::vector<uint64_t> outputLocationsToDelete = {outputLocation};
  for (uint64_t location : outputLocationsToDelete) {
    WasmEdge_Value Params[1] = {WasmEdge_ValueGenI64(location)};
    run_function(VMCxt, "deallocate", Params, 1);
  }
  // FIX: the registered module instances were never deleted.
  WasmEdge_ModuleInstanceDelete(ImageImpObj);
  WasmEdge_ModuleInstanceDelete(TFLiteImpObj);
  WasmEdge_ModuleInstanceDelete(TFDummyImpObj);
  WasmEdge_VMDelete(VMCxt);
  return static_cast<jint>(dataString.size());
}
If I run a simple rust wasm example called "run_inference" such as follows:
use std::{ mem, slice};
use wasmedge_ffi::{pack, unpack, allocate, deallocate, greet, read_file_from_bytes};
#[no_mangle]
pub extern fn run_inference(model_location: i64, image_location: i64) -> i64 {
let (model_ptr, model_size) = unpack(model_location);
let model = unsafe { slice::from_raw_parts(model_ptr as *const u8, model_size as usize) };
let (image_ptr, image_size) = unpack(image_location);
let image = unsafe { slice::from_raw_parts(image_ptr as *con
// Return byte length of last byte
let mut output = model[model.len()-1..].to_vec();
output.extend(&image[image.len()-1..]);
let ptr = output.as_ptr() as i32;
let size = output.len() as i32;
// Prevent Rust from reclaiming the memory associated with the string
mem::forget(output);
// Deallocate the memory associated with the input bytes
deallocate(model_location);
deallocate(image_location);
pack(ptr, size)
}
I am able to return a correct value "2" for length of 2 bytes.
However if I try and import use wasmedge_tensorflow_interface;
within my rust wasm module, when I get to this line in the c++ code:
Res = WasmEdge_VMInstantiate(VMCxt);
handle_WasmEdge_result(Res);
I get an error "unknown import", which to me is suggesting that it cannot find wasmedge_tensorflow_interface as this is the only additional import I have added in my wasm module.
Please advise, do you know what could be the issue here?
@hydai Can you confirm whether the wasmedge_tensorflow_interface in this example https://github.com/second-state/WasmEdge-WASINN-examples/blob/master/wasmedge-tf-mobilenet_v2/rust/src/main.rs is compatible with the latest wasmedge-tensorflow-tools https://github.com/second-state/WasmEdge-tensorflow-tools, as I am still getting an "unknown import" error when I try to instantiate the VM with it. Perhaps it's the case that I have not built or linked the .so files correctly.