diff --git a/.envrc b/.envrc index d7ba4b9286..d823f60f4b 100644 --- a/.envrc +++ b/.envrc @@ -1,9 +1,9 @@ # shellcheck shell=bash -if ! has nix_direnv_version || ! nix_direnv_version 3.0.4; then - source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/3.0.4/direnvrc" "sha256-DzlYZ33mWF/Gs8DDeyjr8mnVmQGx7ASYqA5WlxwvBG4=" -fi +#if ! has nix_direnv_version || ! nix_direnv_version 3.0.4; then +# source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/3.0.4/direnvrc" "sha256-DzlYZ33mWF/Gs8DDeyjr8mnVmQGx7ASYqA5WlxwvBG4=" +#fi -dotenv_if_exists +#dotenv_if_exists -use flake -watch_file "$(find ./ -name "*.nix" -printf '"%p" ')" \ No newline at end of file +#use flake +#watch_file "$(find ./ -name "*.nix" -printf '"%p" ')" diff --git a/Cargo.lock b/Cargo.lock index 6153ab7932..681b6cdc9d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5277,38 +5277,17 @@ dependencies = [ "tokio-stream", ] -[[package]] -name = "openvino" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24bd3a7ef39968e6a4f1b1206c1c876f9bd50cf739ccbcd69f8539bbac5dcc7a" -dependencies = [ - "openvino-finder 0.6.0", - "openvino-sys 0.6.0", - "thiserror", -] - [[package]] name = "openvino" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aee013796927eec6012a344f10ecdc06bf26de79c626a2395e3f115464907ef6" dependencies = [ - "openvino-finder 0.7.2", - "openvino-sys 0.7.2", + "openvino-finder", + "openvino-sys", "thiserror", ] -[[package]] -name = "openvino-finder" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d234d1394a413ea8adaf0c40806b9ad1946be6310b441f688840654a331973" -dependencies = [ - "cfg-if", - "log", -] - [[package]] name = "openvino-finder" version = "0.7.2" @@ -5319,18 +5298,6 @@ dependencies = [ "log", ] -[[package]] -name = "openvino-sys" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c98acf37fc84ad9d7da4dc6c18f0f60ad209b43a6f555be01f9003d0a2a43d" -dependencies = [ - "env_logger", - "libloading", - "once_cell", - "openvino-finder 0.6.0", -] - [[package]] name = "openvino-sys" version = "0.7.2" @@ -5340,7 +5307,7 @@ dependencies = [ "env_logger", "libloading", "once_cell", - "openvino-finder 0.7.2", + "openvino-finder", ] [[package]] @@ -5377,33 +5344,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "ort" -version = "2.0.0-rc.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86d83095ae3c1258738d70ae7a06195c94d966a8e546f0d3609dc90885fb61f5" -dependencies = [ - "js-sys", - "libloading", - "ort-sys", - "thiserror", - "tracing", - "web-sys", -] - -[[package]] -name = "ort-sys" -version = "2.0.0-rc.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f2f6427193c808010b126bef45ebd33f8dee43770223a1200f84d3734d6c656" -dependencies = [ - "flate2", - "pkg-config", - "sha2", - "tar", - "ureq", -] - [[package]] name = "os_str_bytes" version = "6.6.1" @@ -6824,21 +6764,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "rustls" -version = "0.23.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebbbdb961df0ad3f2652da8f3fdc4b36122f568f968f45ad3316f26c025c677b" -dependencies = [ - "log", - "once_cell", - "ring 0.17.8", - "rustls-pki-types", - "rustls-webpki 0.102.2", - "subtle", - "zeroize", -] - [[package]] name = "rustls-ffi" version = "0.8.2" @@ -7504,17 +7429,6 @@ dependencies = [ "windows-sys 
0.52.0", ] -[[package]] -name = "socks" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c3dbbd9ae980613c6dd8e28a9407b50509d3803b57624d5dfe8315218cd58b" -dependencies = [ - "byteorder", - "libc", - "winapi", -] - [[package]] name = "spdx" version = "0.10.4" @@ -7706,7 +7620,7 @@ dependencies = [ "futures", "http 1.1.0", "io-extras", - "openvino 0.6.0", + "openvino", "rustix 0.37.27", "spin-componentize", "spin-telemetry", @@ -7966,8 +7880,7 @@ dependencies = [ "anyhow", "curl", "hex-literal", - "openvino 0.7.2", - "ort", + "openvino", "sha1 0.10.6", "spin-app", "spin-core", @@ -9374,22 +9287,6 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" -[[package]] -name = "ureq" -version = "2.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a" -dependencies = [ - "base64 0.22.0", - "log", - "once_cell", - "rustls 0.23.7", - "rustls-pki-types", - "socks", - "url", - "webpki-roots 0.26.1", -] - [[package]] name = "url" version = "2.5.0" diff --git a/crates/componentize/src/abi_conformance/mod.rs b/crates/componentize/src/abi_conformance/mod.rs index 2d192b462d..995b0e12ed 100644 --- a/crates/componentize/src/abi_conformance/mod.rs +++ b/crates/componentize/src/abi_conformance/mod.rs @@ -12,12 +12,12 @@ //! (i.e. inbound requests) or by the host (i.e. outbound requests). //! //! - For a guest-implemented function, the host will call the function and assert the result matches what is -//! expected (see [`Report::inbound_http`] for an example). +//! expected (see [`Report::inbound_http`] for an example). //! //! - For a host-implemented function, the host will call a guest-implemented function according to the specified -//! [`InvocationStyle`] with a set of arguments indicating which host function to call and with what arguments. -//! The host then asserts that host function was indeed called with the expected arguments (see -//! [`Report::http`] for an example). +//! [`InvocationStyle`] with a set of arguments indicating which host function to call and with what arguments. +//! The host then asserts that host function was indeed called with the expected arguments (see +//! [`Report::http`] for an example). #![deny(warnings)] @@ -88,7 +88,7 @@ pub struct TestConfig { /// - Guest-implemented exports which behave as prescribed by the test (e.g. `inbound_http` and `inbound_redis`) /// /// - Host-implemented imports which are called by the guest with the arguments specified by the host -/// (e.g. `http`) +/// (e.g. 
`http`)
 #[derive(Serialize, PartialEq, Eq, Debug)]
 pub struct Report {
     /// Result of the Spin inbound HTTP test
diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml
index 0dd1959373..21be704318 100644
--- a/crates/core/Cargo.toml
+++ b/crates/core/Cargo.toml
@@ -20,7 +20,7 @@ tokio = "1.0"
 bytes = "1.0"
 spin-telemetry = { path = "../telemetry" }
 http = "1.0"
-openvino = { version = "0.6.0", features = ["runtime-linking",] }
+openvino = { version = "0.7.2", features = ["runtime-linking",], optional = true }
 table = { path = "../table" }
 
 [target.'cfg(unix)'.dependencies]
@@ -37,4 +37,9 @@ futures = "0.3"
 
 [build-dependencies]
 curl = { version = "^0.4", features = ["rustls"] }
-anyhow = "^1"
\ No newline at end of file
+anyhow = "^1"
+
+[features]
+default = ["has_gpu", "openvino"]
+openvino = ["dep:openvino"]
+has_gpu = []
diff --git a/crates/core/build.rs b/crates/core/build.rs
index 9303042027..d9e4e3c4c6 100644
--- a/crates/core/build.rs
+++ b/crates/core/build.rs
@@ -19,6 +19,10 @@ fn main() {
     }
 
     println!("cargo:rerun-if-changed=build.rs");
+    println!("cargo:rerun-if-changed=src/*.rs");
+    println!("cargo:rerun-if-changed=tests/integration_test.rs");
+    println!("cargo:rerun-if-changed=tests/core-wasi-test/src/*.rs");
+    println!("cargo:rerun-if-changed=tests/test_host_components/src/*.rs");
 }
 
 fn try_download(url: &str, filename: &PathBuf) -> Result<(), anyhow::Error> {
diff --git a/crates/core/tests/core-wasi-test/src/imagenet.rs b/crates/core/tests/core-wasi-test/src/imagenet.rs
index 5fe02129f1..67892903d2 100644
--- a/crates/core/tests/core-wasi-test/src/imagenet.rs
+++ b/crates/core/tests/core-wasi-test/src/imagenet.rs
@@ -1,5 +1,5 @@
-use crate::ml::test::test::{graph, inference, tensor};
-use image2tensor::convert_image_to_tensor_bytes;
+use crate::ml::test::test::{graph, inference, tensor, errors};
+use image2tensor::convert_image_bytes_to_tensor_bytes;
 
 use crate::imagenet_classes;
 use crate::Path;
@@ -40,6 +40,43 @@ fn map_string_to_execution_target(target: &str) -> Result<ExecutionTarget, String> {
 
+fn preprocess_image_for_imagenet(image_file_data: &[u8], tensor_dimensions: &[u32]) -> Vec<u8> {
+    let tensor_data = convert_image_bytes_to_tensor_bytes(
+        image_file_data,
+        tensor_dimensions[2],
+        tensor_dimensions[3],
+        image2tensor::TensorType::F32,
+        image2tensor::ColorOrder::BGR,
+    )
+    .unwrap();
+
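+    // image2tensor returns interleaved BGR bytes (NHWC); the loop below
+    // rearranges them into planar NCHW order, reading each value as a
+    // little-endian f32.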
+    let mut new_tensor_data = Vec::<f32>::new();
+
+    let num_colors = tensor_dimensions[1] as usize;
+    let height = tensor_dimensions[2] as usize;
+    let width = tensor_dimensions[3] as usize;
+
+    for c in 0..num_colors {
+        for y in 0..height {
+            for x in 0..width {
+                let offset = ((y * width + x) * 3 + c) * 4;
+                let v = f32::from_le_bytes(
+                    tensor_data[offset..offset + 4]
+                        .try_into()
+                        .expect("Needed 4 bytes for a float"),
+                );
+                new_tensor_data.push(v);
+            }
+        }
+    }
+    let (_head, body, _tail) = unsafe { new_tensor_data.align_to::<u8>() };
+    body.to_vec()
+}
+
 pub fn imagenet_openvino_test(
     path_as_string: String,
     target_as_string: String,
@@ -84,7 +121,7 @@ pub fn imagenet_openvino_test(
     };
     let context = {
         let start_for_elapsed_macro = std::time::Instant::now();
-        let context = graph::Graph::init_execution_context(&imagenet_graph).unwrap();
+        let context = graph::Graph::init_execution_context(&imagenet_graph).map_err(|e| errors::Error::data(&e)).expect("init_execution_context failed");
         let elapsed = start_for_elapsed_macro.elapsed();
         eprintln!(
             "Created context with ID: {:?} {}",
             context,
             elapsed.as_millis()
         );
@@ -95,14 +132,8 @@ pub fn imagenet_openvino_test(
         context
     };
 
     let tensor_dimensions: Vec<u32> = vec![1, 3, 224, 224];
-    let tensor_data = convert_image_to_tensor_bytes(
-        &image_file, //"images/0.jpg",
-        tensor_dimensions[2],
-        tensor_dimensions[3],
-        image2tensor::TensorType::F32,
-        image2tensor::ColorOrder::BGR,
-    )
-    .unwrap();
+    let image_file_bytes = std::fs::read(&image_file).unwrap();
+    let tensor_data = preprocess_image_for_imagenet(&image_file_bytes, &tensor_dimensions);
 
     let tensor_id = {
         let start_for_elapsed_macro = std::time::Instant::now();
diff --git a/crates/core/tests/integration_test.rs b/crates/core/tests/integration_test.rs
index ba66361714..714443d9c1 100644
--- a/crates/core/tests/integration_test.rs
+++ b/crates/core/tests/integration_test.rs
@@ -5,7 +5,8 @@ use std::{
 };
 
 mod test_host_components;
-use crate::test_host_components::ml::ml::MLHostComponent;
+
+use crate::test_host_components::ml_host_component::MLHostComponent;
 use crate::test_host_components::multiplier::{Multiplier, MultiplierHostComponent};
 
 use anyhow::Context;
@@ -58,7 +59,6 @@ async fn test_read_only_preopened_dir_write_fails() {
         .expect("trap error was not an I32Exit");
     assert_eq!(trap.0, 1);
 }
-
 #[tokio::test(flavor = "multi_thread")]
 async fn test_read_write_preopened_dir() {
     let filename = "test_file";
@@ -161,6 +161,7 @@ async fn test_host_component_data_update() {
     assert_eq!(stdout, "500");
 }
 
+#[cfg(feature = "openvino")]
 #[tokio::test(flavor = "multi_thread")]
 async fn test_host_component_imagenet_openvino_cpu() {
     let engine = test_engine();
@@ -186,6 +187,8 @@ async fn test_host_component_imagenet_openvino_cpu() {
     assert_eq!(stdout, "0.47 -> Eskimo dog, husky\n0.37 -> Siberian husky\n0.01 -> malamute, malemute, Alaskan malamute");
 }
 
+#[cfg(feature = "openvino")]
+#[cfg(feature = "has_gpu")]
 #[tokio::test(flavor = "multi_thread")]
 async fn test_host_component_imagenet_openvino_gpu() {
     let engine = test_engine();
@@ -208,11 +211,11 @@ async fn test_host_component_imagenet_openvino_gpu() {
     )
     .await
     .unwrap();
-    assert_eq!(stdout, "0.96 -> mountain bike, all-terrain bike, off-roader\n0.01 -> bicycle-built-for-two, tandem bicycle, tandem\n0.00 -> alp");
+    assert_eq!(stdout, "0.97 -> mountain bike, all-terrain bike, off-roader\n0.01 -> bicycle-built-for-two, tandem bicycle, tandem\n0.00 -> alp");
 }
 
 #[tokio::test(flavor = "multi_thread")]
-#[cfg(not(tarpaulin))]
+// #[cfg(not(tarpaulin))]
 async fn test_panic() {
     let err = run_core_wasi_test(["panic"], |_| {}).await.unwrap_err();
     let trap = err.downcast::<Trap>().expect("trap");
@@ -230,7 +233,8 @@ fn test_config() -> Config {
 
 fn test_engine() -> Engine<()> {
     let mut builder = Engine::builder(&test_config()).unwrap();
     builder.add_host_component(MultiplierHostComponent).unwrap();
-    builder.add_host_component(MLHostComponent).unwrap();
+
+    builder.add_host_component(MLHostComponent {}).unwrap();
 
     builder
         .link_import(|l, _| wasmtime_wasi::add_to_linker_async(l))
diff --git a/crates/core/tests/test_host_components/ml.rs b/crates/core/tests/test_host_components/ml.rs
deleted file mode 100644
index 309f56d2f3..0000000000
--- a/crates/core/tests/test_host_components/ml.rs
+++ /dev/null
@@ -1,490 +0,0 @@
-pub mod ml {
-    wasmtime::component::bindgen!("ml" in "tests/core-wasi-test/wit");
-
-    use spin_core::HostComponent;
-
-    use anyhow::{anyhow, Context};
-    use test::test::errors;
-    use test::test::errors::HostError;
-    use test::test::graph;
-    use test::test::inference;
-    use test::test::tensor;
-
-    use test::test::errors::ErrorCode;
-    use test::test::graph::{ExecutionTarget, Graph, GraphBuilder, GraphEncoding};
-    use test::test::inference::GraphExecutionContext;
-    use wasmtime::component::Resource;
-
-    use openvino::{Layout, Precision, TensorDesc};
-
-    #[derive(Clone)]
-    pub struct 
MLHostComponent; - - impl HostComponent for MLHostComponent { - type Data = MLHostImpl; - - fn add_to_linker( - linker: &mut spin_core::Linker, - get: impl Fn(&mut spin_core::Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - ) -> anyhow::Result<()> { - Ml::add_to_linker(linker, get) - } - - fn build_data(&self) -> Self::Data { - MLHostImpl { - ..Default::default() - } - } - } - #[derive(Debug)] - pub struct GraphInternalData { - pub xml: Vec, - pub weights: Vec, - pub target: ExecutionTarget, - } - - pub struct GraphExecutionContextInternalData { - pub cnn_network: openvino::CNNNetwork, - //pub executable_network: Mutex, - pub infer_request: openvino::InferRequest, - } - - pub struct TensorInternalData { - tensor_dimensions: tensor::TensorDimensions, - tensor_type: tensor::TensorType, - tensor_data: tensor::TensorData, - } - - pub struct ErrorInternalData { - code: errors::ErrorCode, - message: String, - } - - #[derive(Default)] - pub struct MLHostImpl { - pub openvino: Option, - pub graphs: table::Table, - pub executions: table::Table, - pub tensors: table::Table, - pub errors: table::Table, - } - - impl MLHostImpl { - fn new_error( - errors: &mut table::Table, - code: ErrorCode, - message: String, - ) -> Resource { - errors - .push(ErrorInternalData { code, message }) - .map(Resource::::new_own) - .expect("Can't allocate error") - } - - fn init_execution_context_internal( - graph: &GraphInternalData, - openvino: &mut Option, - executions: &mut table::Table, - ) -> Result, anyhow::Error> { - if openvino.is_none() { - openvino.replace(openvino::Core::new(None)?); - } - if openvino.is_some() { - let mut cnn_network = openvino - .as_mut() - .context("Can't create openvino graph without backend")? - .read_network_from_buffer(&graph.xml, &graph.weights)?; - - // Construct OpenVINO graph structures: `cnn_network` contains the graph - // structure, `exec_network` can perform inference. - //let core = self - // .0 - // .as_mut() - // .expect("openvino::Core was previously constructed"); - //let mut cnn_network = core.read_network_from_buffer(&xml, &weights)?; - - // TODO: this is a temporary workaround. We need a more elegant way to - // specify the layout in the long run. However, without this newer - // versions of OpenVINO will fail due to parameter mismatch. 
- for i in 0..cnn_network.get_inputs_len().unwrap() { - let name = cnn_network.get_input_name(i)?; - cnn_network.set_input_layout(&name, Layout::NHWC)?; - } - - let mut exec_network = openvino - .as_mut() - .expect("") - .load_network(&cnn_network, map_execution_target_to_string(graph.target))?; - let infer_request = exec_network - .create_infer_request() - .context("Can't create InferRequest")?; - let graph_execution_context = GraphExecutionContextInternalData { - cnn_network, - //executable_network: Mutex::new(exec_network), - infer_request, - }; - return executions - .push(graph_execution_context) - .map(Resource::::new_own) - .map_err(|_| anyhow!("Can't store execution context")); - } - Err(anyhow!("Can't create openvino backend")) - } - - fn get_output_internal( - graph_execution: &mut GraphExecutionContextInternalData, - input_name: String, - ) -> Result { - let index = input_name.parse::().map_err(|err| { - format!( - "Can't parse {} to usize for input_name, err = {err}", - input_name - ) - })?; - let output_name = graph_execution - .cnn_network - .get_output_name(index) - .map_err(|err| format!("Can't find output name for ID = {index}, err = {err}"))?; - - let blob = graph_execution - .infer_request - .get_blob(&output_name) - .map_err(|err| { - format!("Can't get blob for output name = {output_name}, err = {err}") - })?; - let tensor_desc = blob - .tensor_desc() - .map_err(|err| format!("Can't get blob description, err = {err}"))?; - let buffer = blob - .buffer() - .map_err(|err| format!("Can't get blob buffer, error = {err}"))? - .to_vec(); - let tensor_dimensions = tensor_desc - .dims() - .iter() - .map(|&d| d as u32) - .collect::>(); - let tensor = TensorInternalData { - tensor_dimensions, - tensor_type: map_precision_to_tensor_type(tensor_desc.precision()), - tensor_data: buffer, - }; - Ok(tensor) - } - } - - impl graph::HostGraph for MLHostImpl { - fn init_execution_context( - &mut self, - graph: Resource, - ) -> Result, Resource> { - let res = match self.graphs.get(graph.rep()) { - Some(graph) => MLHostImpl::init_execution_context_internal( - graph, - &mut self.openvino, - &mut self.executions, - ) - .map_err(|err| ErrorInternalData { - code: ErrorCode::RuntimeError, - message: err.to_string(), - }), - None => Err(ErrorInternalData { - code: ErrorCode::RuntimeError, - message: "Can't create graph execution context".to_string(), - }), - }; - match res { - Ok(res) => Ok(res), - Err(e) => Err(MLHostImpl::new_error(&mut self.errors, e.code, e.message)), - } - } - - fn drop(&mut self, graph: Resource) -> Result<(), anyhow::Error> { - self.graphs - .remove(graph.rep()) - .context(format!("Can't find graph with ID = {}", graph.rep())) - .map(|_| ()) - } - } - - impl errors::HostError for MLHostImpl { - fn new(&mut self, code: errors::ErrorCode, message: String) -> Resource { - MLHostImpl::new_error(&mut self.errors, code, message) - } - - fn drop( - &mut self, - error: Resource, - ) -> std::result::Result<(), anyhow::Error> { - self.errors - .remove(error.rep()) - .context(format!("Can't find error with ID = {}", error.rep())) - .map(|_| ()) - } - - fn code(&mut self, error: Resource) -> ErrorCode { - if let Some(e) = self.errors.get(error.rep()) { - return e.code; - } - panic!("Can't find error with ID = {}", error.rep()); - } - - fn data(&mut self, error: Resource) -> String { - if let Some(e) = self.errors.get(error.rep()) { - return e.message.clone(); - } - panic!("Can't find error with ID = {}", error.rep()); - } - } - impl tensor::HostTensor for MLHostImpl { - fn new( - &mut 
self, - tensor_dimensions: tensor::TensorDimensions, - tensor_type: tensor::TensorType, - tensor_data: tensor::TensorData, - ) -> Resource { - let tensor = TensorInternalData { - tensor_dimensions, - tensor_type, - tensor_data, - }; - self.tensors - .push(tensor) - .map(Resource::::new_own) - .expect("Can't allocate tensor") - } - fn dimensions(&mut self, tensor: Resource) -> Vec { - if let Some(t) = self.tensors.get(tensor.rep()) { - return t.tensor_dimensions.clone(); - } - panic!("Can't find tensor with ID = {}", tensor.rep()); - } - - fn ty(&mut self, tensor: Resource) -> tensor::TensorType { - if let Some(t) = self.tensors.get(tensor.rep()) { - return t.tensor_type; - } - panic!("Can't find tensor with ID = {}", tensor.rep()); - } - - fn data(&mut self, tensor: Resource) -> tensor::TensorData { - if let Some(t) = self.tensors.get(tensor.rep()) { - return t.tensor_data.clone(); - } - panic!("Can't find tensor with ID = {}", tensor.rep()); - } - fn drop( - &mut self, - tensor: Resource, - ) -> std::result::Result<(), anyhow::Error> { - self.tensors - .remove(tensor.rep()) - .context(format!("Can't find tensor with ID = {}", tensor.rep())) - .map(|_| ()) - } - } - - impl inference::HostGraphExecutionContext for MLHostImpl { - fn set_input( - &mut self, - graph_execution_context: Resource, - input_name: String, - tensor: Resource, - ) -> Result<(), Resource> { - let index = input_name - .parse() - .expect("Can't parse {} to usize for input_name"); - // Construct the blob structure. TODO: there must be some good way to - // discover the layout here; `desc` should not have to default to NHWC. - let tensor_resource = self - .tensors - .get(tensor.rep()) - .unwrap_or_else(|| panic!("Can't find tensor with ID = {}", tensor.rep())); - let precision = map_tensor_type_to_precision(tensor_resource.tensor_type); - let dimensions = tensor_resource - .tensor_dimensions - .iter() - .map(|&d| d as usize) - .collect::>(); - let desc = TensorDesc::new(Layout::NHWC, &dimensions, precision); - let blob = openvino::Blob::new(&desc, &tensor_resource.tensor_data) - .expect("Error in Blob::new"); - let execution_context: &mut GraphExecutionContextInternalData = self - .executions - .get_mut(graph_execution_context.rep()) - .unwrap_or_else(|| panic!("Can't find tensor with ID = {}", tensor.rep())); - let input_name = execution_context - .cnn_network - .get_input_name(index) - .unwrap_or_else(|_| panic!("Can't find input with name = {}", index)); - match execution_context.infer_request.set_blob(&input_name, &blob) { - Ok(res) => Ok(res), - Err(err) => Err(self.new( - ErrorCode::RuntimeError, - format!("Inference error = {:?}", err.to_string()), - )), - } - } - - fn compute( - &mut self, - graph_execution_context: Resource, - ) -> Result<(), Resource> { - let graph_execution = self - .executions - .get_mut(graph_execution_context.rep()) - .ok_or(MLHostImpl::new_error( - &mut self.errors, - ErrorCode::RuntimeError, - format!( - "Can't find graph execution context with ID = {}", - graph_execution_context.rep() - ), - ))?; - match graph_execution.infer_request.infer() { - Ok(..) 
=> Ok(()), - Err(err) => Err(MLHostImpl::new_error( - &mut self.errors, - ErrorCode::RuntimeError, - format!("Inference error = {:?}", err.to_string()), - )), - } - } - - fn get_output( - &mut self, - graph_execution_context: Resource, - input_name: String, - ) -> Result, Resource> { - let graph_execution = self - .executions - .get_mut(graph_execution_context.rep()) - .ok_or(format!( - "Can't find graph execution context with ID = {}", - graph_execution_context.rep() - )) - .unwrap(); - - match MLHostImpl::get_output_internal(graph_execution, input_name) { - Ok(tensor) => self - .tensors - .push(tensor) - .map(Resource::::new_own) - .map_err(|_| { - MLHostImpl::new_error( - &mut self.errors, - ErrorCode::RuntimeError, - "Can't create tensor for get_output".to_string(), - ) - }), - Err(err) => Err(MLHostImpl::new_error( - &mut self.errors, - ErrorCode::RuntimeError, - err, - )), - } - } - - fn drop( - &mut self, - execution: Resource, - ) -> std::result::Result<(), anyhow::Error> { - let id = execution.rep(); - self.executions - .remove(id) - .context("{Can't drow GraphExecutionContext with id = {id}") - .map(|_| ()) - } - } - - impl errors::Host for MLHostImpl {} - impl graph::Host for MLHostImpl { - fn load( - &mut self, - graph: Vec, - graph_encoding: GraphEncoding, - target: ExecutionTarget, - ) -> Result, Resource> { - if graph.len() != 2 { - return Err(MLHostImpl::new_error( - &mut self.errors, - ErrorCode::RuntimeError, - "Expected 2 elements in graph builder vector".to_string(), - )); - } - if graph_encoding != GraphEncoding::Openvino { - return Err(MLHostImpl::new_error( - &mut self.errors, - ErrorCode::RuntimeError, - "Only OpenVINO encoding is supported".to_string(), - )); - } - // Read the guest array. - let graph_internal_data = GraphInternalData { - xml: graph[0].clone(), - weights: graph[1].clone(), - target, - }; - match self.graphs.push(graph_internal_data) { - Ok(graph_rep) => Ok(Resource::::new_own(graph_rep)), - Err(err) => { - match self.errors.push(ErrorInternalData { - code: ErrorCode::RuntimeError, - message: format!("{:?}", err), - }) { - Ok(error_rep) => Err(Resource::::new_own(error_rep)), - Err(err) => { - panic!("Can't create internal error for {:?}", err); - } - } - } - } - } - fn load_by_name( - &mut self, - _graph: String, - ) -> Result, Resource> { - panic!("[graph::Host] fn load_by_name -> Not implemented"); - } - } - - impl inference::Host for MLHostImpl {} - impl tensor::Host for MLHostImpl {} - - /// Return the execution target string expected by OpenVINO from the - /// `ExecutionTarget` enum provided by wasi-nn. - fn map_execution_target_to_string(target: ExecutionTarget) -> &'static str { - match target { - ExecutionTarget::Cpu => "CPU", - ExecutionTarget::Gpu => "GPU", - ExecutionTarget::Tpu => { - unimplemented!("OpenVINO does not support TPU execution targets") - } - } - } - - /// Return OpenVINO's precision type for the `TensorType` enum provided by - /// wasi-nn. 
-    fn map_tensor_type_to_precision(tensor_type: tensor::TensorType) -> openvino::Precision {
-        match tensor_type {
-            tensor::TensorType::Fp16 => Precision::FP16,
-            tensor::TensorType::Fp32 => Precision::FP32,
-            tensor::TensorType::Fp64 => Precision::FP64,
-            tensor::TensorType::U8 => Precision::U8,
-            tensor::TensorType::I32 => Precision::I32,
-            tensor::TensorType::I64 => Precision::I64,
-            tensor::TensorType::Bf16 => todo!("not yet supported in `openvino` bindings"),
-        }
-    }
-    fn map_precision_to_tensor_type(precision: openvino::Precision) -> tensor::TensorType {
-        match precision {
-            Precision::FP16 => tensor::TensorType::Fp16,
-            Precision::FP32 => tensor::TensorType::Fp32,
-            Precision::FP64 => tensor::TensorType::Fp64,
-            Precision::U8 => tensor::TensorType::U8,
-            Precision::I32 => tensor::TensorType::I32,
-            Precision::I64 => tensor::TensorType::I64,
-            _ => todo!("not yet supported in `openvino` bindings"),
-        }
-    }
-}
diff --git a/crates/core/tests/test_host_components/ml_backend/mod.rs b/crates/core/tests/test_host_components/ml_backend/mod.rs
new file mode 100644
index 0000000000..9a5fba2310
--- /dev/null
+++ b/crates/core/tests/test_host_components/ml_backend/mod.rs
@@ -0,0 +1,61 @@
+use crate::test_host_components::ml_host_impl;
+use crate::test_host_components::ml_wit::test::test as ml_wit;
+
+use ml_host_impl::{ExecutionContext, GraphInternalData, TensorInternalData};
+use ml_wit::graph::{ExecutionTarget, GraphBuilder, GraphEncoding};
+
+#[cfg(feature = "openvino")]
+pub mod openvino;
+
+/// A [Backend] contains the necessary state to load [Graph]s.
+pub trait BackendInner: Send + Sync {
+    fn encoding(&self) -> GraphEncoding;
+    fn load(
+        &mut self,
+        builders: Vec<GraphBuilder>,
+        target: ExecutionTarget,
+        encoding: GraphEncoding,
+        name: Option<String>,
+    ) -> Result<GraphInternalData, anyhow::Error>;
+
+    fn load_by_name(&mut self, model_name: String) -> Result<GraphInternalData, anyhow::Error>;
+}
+
+/// A [BackendGraph] can create [BackendExecutionContext]s; this is the backing
+/// implementation for the user-facing graph.
+pub trait BackendGraph: Send + Sync {
+    fn init_execution_context(&mut self) -> Result<ExecutionContext, anyhow::Error>;
+}
+
+pub trait BackendExecutionContext: Send + Sync {
+    fn set_input(
+        &mut self,
+        tensor_id: &TensorId,
+        tensor: &TensorInternalData,
+    ) -> Result<(), anyhow::Error>;
+
+    fn compute(&mut self) -> Result<(), anyhow::Error>;
+
+    fn get_output(&mut self, tensor_id: &TensorId) -> Result<TensorInternalData, anyhow::Error>;
+}
+
+/// An identifier for a tensor in a [Graph].
+#[derive(Debug)]
+pub enum TensorId {
+    Index(u32),
+    //Name(String),
+}
+impl TensorId {
+    pub fn index(&self) -> Option<u32> {
+        match self {
+            TensorId::Index(i) => Some(*i),
+            //TensorId::Name(_) => None,
+        }
+    }
+    pub fn name(&self) -> Option<&str> {
+        match self {
+            TensorId::Index(_) => None,
+            //TensorId::Name(n) => Some(n),
+        }
+    }
+}
diff --git a/crates/core/tests/test_host_components/ml_backend/openvino.rs b/crates/core/tests/test_host_components/ml_backend/openvino.rs
new file mode 100644
index 0000000000..487b54a6cb
--- /dev/null
+++ b/crates/core/tests/test_host_components/ml_backend/openvino.rs
@@ -0,0 +1,190 @@
+use crate::test_host_components::ml_wit::test::test as ml_wit;
+
+use ml_wit::graph::{ExecutionTarget, GraphBuilder, GraphEncoding};
+use ml_wit::tensor;
+use ml_wit::tensor::TensorType;
+
+use crate::test_host_components::ml_backend::{
+    BackendExecutionContext, BackendGraph, BackendInner, TensorId,
+};
+use crate::test_host_components::ml_host_impl::{
+    ExecutionContext, GraphInternalData, TensorInternalData,
+};
+
+use openvino::{DeviceType, ElementType, Shape, Tensor as OvTensor};
+
+use anyhow::anyhow;
+use std::sync::{Arc, Mutex};
+
+pub struct OpenvinoBackend {
+    pub openvino: openvino::Core,
+}
+
+struct OpenvinoGraph(Arc<Mutex<openvino::CompiledModel>>);
+
+unsafe impl Send for OpenvinoGraph {}
+unsafe impl Sync for OpenvinoGraph {}
+
+unsafe impl Send for OpenvinoBackend {}
+unsafe impl Sync for OpenvinoBackend {}
+
+impl BackendInner for OpenvinoBackend {
+    fn encoding(&self) -> GraphEncoding {
+        GraphEncoding::Openvino
+    }
+
+    fn load(
+        &mut self,
+        builders: Vec<GraphBuilder>,
+        target: ExecutionTarget,
+        encoding: GraphEncoding,
+        _name: Option<String>,
+    ) -> Result<GraphInternalData, anyhow::Error> {
+        if builders.len() != 2 {
+            return Err(anyhow!("Expected 2 elements in graph builder vector"));
+        }
+        if encoding != GraphEncoding::Openvino {
+            return Err(anyhow!("Only OpenVINO encoding is supported"));
+        }
+
+        // Read the guest array.
+        let xml = &builders[0];
+        let weights = &builders[1];
+
+        // Construct a new tensor for the model weights.
+        let shape = Shape::new(&[1, weights.len() as i64 / 4])?;
+        let mut weights_tensor = OvTensor::new(ElementType::F32, &shape)?;
+        let buffer = weights_tensor.get_raw_data_mut()?;
+        buffer.copy_from_slice(weights);
+
+        // Construct OpenVINO graph structures: `model` contains the graph
+        // structure, `compiled_model` can perform inference.
+
+        let model = self
+            .openvino
+            .read_model_from_buffer(xml, Some(&weights_tensor))?;
+
+        let compiled_model = self
+            .openvino
+            .compile_model(&model, map_execution_target_to_string(target))?;
+        Ok(GraphInternalData(Box::new(OpenvinoGraph(Arc::new(
+            Mutex::new(compiled_model),
+        )))))
+    }
+
+    fn load_by_name(&mut self, _model_name: String) -> Result<GraphInternalData, anyhow::Error> {
+        Err(anyhow!("not implemented"))
+    }
+}
+
+impl BackendGraph for OpenvinoGraph {
+    fn init_execution_context(&mut self) -> Result<ExecutionContext, anyhow::Error> {
+        let mut compiled_model = self.0.lock().unwrap();
+        let infer_request = compiled_model.create_infer_request()?;
+        Ok(ExecutionContext(Box::new(OpenvinoExecutionContext {
+            infer_request,
+        })))
+    }
+}
+
+pub struct OpenvinoExecutionContext {
+    pub infer_request: openvino::InferRequest,
+}
+
+unsafe impl Send for OpenvinoExecutionContext {}
+unsafe impl Sync for OpenvinoExecutionContext {}
+
+impl BackendExecutionContext for OpenvinoExecutionContext {
+    fn set_input(
+        &mut self,
+        tensor_id: &TensorId,
+        tensor: &TensorInternalData,
+    ) -> Result<(), anyhow::Error> {
+        // Construct the tensor.
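+        // Map the wasi-nn tensor description to OpenVINO terms (element type,
+        // i64 shape), then copy the raw bytes into the newly allocated tensor.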
+        let precision = map_tensor_type_to_element_type(&tensor.tensor_type);
+        let dimensions = tensor
+            .tensor_dimensions
+            .iter()
+            .map(|&d| d as i64)
+            .collect::<Vec<i64>>();
+        let shape = Shape::new(&dimensions)?;
+        let mut new_tensor = OvTensor::new(precision, &shape)?;
+        let buffer = new_tensor.get_raw_data_mut()?;
+        buffer.copy_from_slice(&tensor.tensor_data);
+
+        // Assign the tensor to the request.
+        match tensor_id {
+            TensorId::Index(i) => self
+                .infer_request
+                .set_input_tensor_by_index(*i as usize, &new_tensor)?,
+            //TensorId::Name(name) => self.infer_request.set_tensor(name, &new_tensor)?,
+        };
+
+        Ok(())
+    }
+
+    fn compute(&mut self) -> Result<(), anyhow::Error> {
+        self.infer_request
+            .infer()
+            .map_err(|err| anyhow!("Inference error = {:?}", err.to_string()))
+    }
+
+    fn get_output(&mut self, tensor_id: &TensorId) -> Result<TensorInternalData, anyhow::Error> {
+        let output_tensor = match tensor_id {
+            TensorId::Index(i) => self.infer_request.get_output_tensor_by_index(*i as usize)?,
+            //TensorId::Name(name) => self.infer_request.get_tensor(name)?,
+        };
+        let dimensions = output_tensor
+            .get_shape()?
+            .get_dimensions()
+            .iter()
+            .map(|&dim| dim as u32)
+            .collect::<Vec<u32>>();
+        let element_type = output_tensor
+            .get_element_type()
+            .map_err(|err| anyhow!("Inference error = {err:?}"))?;
+        let data = output_tensor.get_raw_data()?.to_vec();
+        Ok(TensorInternalData {
+            tensor_dimensions: dimensions,
+            tensor_type: map_precision_to_tensor_type(element_type),
+            tensor_data: data,
+        })
+    }
+}
+
+/// Return the execution target string expected by OpenVINO from the
+/// `ExecutionTarget` enum provided by wasi-nn.
+fn map_execution_target_to_string(target: ExecutionTarget) -> DeviceType<'static> {
+    match target {
+        ExecutionTarget::Cpu => DeviceType::CPU,
+        ExecutionTarget::Gpu => DeviceType::GPU,
+        ExecutionTarget::Tpu => {
+            unimplemented!("OpenVINO does not support TPU execution targets")
+        }
+    }
+}
+
+fn map_precision_to_tensor_type(precision: openvino::ElementType) -> tensor::TensorType {
+    //use openvino::Precision;
+    match precision {
+        ElementType::F16 => TensorType::Fp16,
+        ElementType::F32 => TensorType::Fp32,
+        ElementType::F64 => TensorType::Fp64,
+        ElementType::U8 => TensorType::U8,
+        ElementType::I32 => TensorType::I32,
+        ElementType::I64 => TensorType::I64,
+        _ => todo!("not yet supported in `openvino` bindings"),
+    }
+}
+
+fn map_tensor_type_to_element_type(tensor_type: &TensorType) -> ElementType {
+    match tensor_type {
+        TensorType::Fp16 => ElementType::F16,
+        TensorType::Fp32 => ElementType::F32,
+        TensorType::Fp64 => ElementType::F64,
+        TensorType::U8 => ElementType::U8,
+        TensorType::I32 => ElementType::I32,
+        TensorType::I64 => ElementType::I64,
+        TensorType::Bf16 => ElementType::Bf16,
+    }
+}
diff --git a/crates/core/tests/test_host_components/ml_host_component.rs b/crates/core/tests/test_host_components/ml_host_component.rs
new file mode 100644
index 0000000000..06107604e8
--- /dev/null
+++ b/crates/core/tests/test_host_components/ml_host_component.rs
@@ -0,0 +1,39 @@
+use spin_core::HostComponent;
+
+use crate::test_host_components::ml_backend;
+use crate::test_host_components::ml_host_impl::MLHostImpl;
+use crate::test_host_components::ml_wit::test::test as ml_wit;
+use ml_backend::BackendInner;
+
+#[cfg(feature = "openvino")]
+use ml_backend::openvino::OpenvinoBackend;
+
+pub struct MLHostComponent {}
+
+impl HostComponent for MLHostComponent {
+    type Data = MLHostImpl;
+
+    fn add_to_linker<T: Send>(
+        linker: &mut spin_core::Linker<T>,
+        get: impl Fn(&mut spin_core::Data<T>) -> &mut Self::Data + Send + Sync + 
Copy + 'static,
+    ) -> anyhow::Result<()> {
+        ml_wit::graph::add_to_linker(linker, get)?;
+        ml_wit::inference::add_to_linker(linker, get)?;
+        ml_wit::errors::add_to_linker(linker, get)?;
+        ml_wit::tensor::add_to_linker(linker, get)
+    }
+
+    fn build_data(&self) -> Self::Data {
+        let backends: Vec<Box<dyn BackendInner>> = vec![
+            #[cfg(feature = "openvino")]
+            Box::new(OpenvinoBackend {
+                openvino: openvino::Core::new().unwrap(),
+            }),
+        ];
+
+        MLHostImpl {
+            backends,
+            ..Default::default()
+        }
+    }
+}
diff --git a/crates/core/tests/test_host_components/ml_host_impl.rs b/crates/core/tests/test_host_components/ml_host_impl.rs
new file mode 100644
index 0000000000..0f781dc441
--- /dev/null
+++ b/crates/core/tests/test_host_components/ml_host_impl.rs
@@ -0,0 +1,376 @@
+use anyhow::{anyhow, Context};
+
+use crate::test_host_components::ml_backend;
+use crate::test_host_components::ml_wit::test::test as ml_wit;
+
+use ml_wit::errors::ErrorCode;
+use ml_wit::graph::{ExecutionTarget, Graph, GraphBuilder, GraphEncoding};
+use ml_wit::inference::GraphExecutionContext;
+use ml_wit::{errors, graph, inference, tensor};
+
+use spin_core::wasmtime::component::Resource;
+
+use ml_backend::{BackendExecutionContext, BackendGraph, BackendInner, TensorId};
+
+pub struct GraphInternalData(pub Box<dyn BackendGraph>);
+
+pub struct TensorInternalData {
+    pub tensor_dimensions: tensor::TensorDimensions,
+    pub tensor_type: tensor::TensorType,
+    pub tensor_data: tensor::TensorData,
+}
+
+pub struct ErrorInternalData {
+    code: errors::ErrorCode,
+    message: String,
+}
+
+pub struct ExecutionContext(pub Box<dyn BackendExecutionContext>);
+
+#[derive(Default)]
+pub struct MLHostImpl {
+    pub graphs: table::Table<GraphInternalData>,
+    pub tensors: table::Table<TensorInternalData>,
+    pub errors: table::Table<ErrorInternalData>,
+
+    pub executions: table::Table<ExecutionContext>,
+    pub backends: Vec<Box<dyn BackendInner>>,
+}
+
+impl MLHostImpl {
+    pub fn new_error(
+        errors: &mut table::Table<ErrorInternalData>,
+        code: ErrorCode,
+        message: String,
+    ) -> Resource<errors::Error> {
+        errors
+            .push(ErrorInternalData { code, message })
+            .map(Resource::<errors::Error>::new_own)
+            .expect("Can't allocate error")
+    }
+
+    fn new_graph(
+        graphs: &mut table::Table<GraphInternalData>,
+        errors: &mut table::Table<ErrorInternalData>,
+        graph_internal_data: GraphInternalData,
+    ) -> Result<Result<Resource<Graph>, Resource<errors::Error>>, anyhow::Error> {
+        Ok(match graphs.push(graph_internal_data) {
+            Ok(graph_rep) => Ok(Resource::<Graph>::new_own(graph_rep)),
+            Err(err) => Err(MLHostImpl::new_error(
+                errors,
+                ErrorCode::RuntimeError,
+                format!("{:?}", err),
+            )),
+        })
+    }
+}
+
+impl graph::HostGraph for MLHostImpl {
+    fn init_execution_context(
+        &mut self,
+        graph: Resource<Graph>,
+    ) -> Result<Resource<GraphExecutionContext>, Resource<errors::Error>> {
+        if let Some(graph) = self.graphs.get_mut(graph.rep()) {
+            match graph.0.init_execution_context() {
+                Ok(execution_context) => {
+                    return self
+                        .executions
+                        .push(execution_context)
+                        .map(Resource::<GraphExecutionContext>::new_own)
+                        .map_err(|_| {
+                            MLHostImpl::new_error(
+                                &mut self.errors,
+                                ErrorCode::RuntimeError,
+                                "Can't create graph execution context".to_string(),
+                            )
+                        });
+                }
+                Err(err) => {
+                    return Err(MLHostImpl::new_error(
+                        &mut self.errors,
+                        ErrorCode::RuntimeError,
+                        err.to_string(),
+                    ));
+                }
+            }
+        }
+        panic!(
+            "[graph::HostGraph] fn init_execution_context -> Can't find graph with ID = {}",
+            graph.rep()
+        )
+    }
+
+    fn drop(&mut self, graph: Resource<Graph>) -> Result<(), anyhow::Error> {
+        self.graphs
+            .remove(graph.rep())
+            .context(format!("Can't find graph with ID = {}", graph.rep()))?;
+        Ok(())
+    }
+}
+
+impl errors::HostError for MLHostImpl {
+    fn new(&mut self, code: errors::ErrorCode, data: String) -> Resource<errors::Error> {
+        MLHostImpl::new_error(&mut self.errors, code, data)
+    }
+
+    fn drop(&mut self, error: Resource<errors::Error>) -> Result<(), anyhow::Error> {
+        self.errors
+            .remove(error.rep())
+            .ok_or(anyhow!(format!(
+                "Can't find error with ID = {}",
+                error.rep()
+            )))
+            .map(|_| ())
+    }
+
+    fn code(&mut self, error: Resource<errors::Error>) -> ErrorCode {
+        self.errors.get(error.rep()).unwrap().code
+    }
+
+    fn data(&mut self, error: Resource<errors::Error>) -> String {
+        self.errors.get(error.rep()).unwrap().message.clone()
+    }
+}
+
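+// Tensor resources are stored whole (dimensions, type, raw bytes) in the host
+// table; the accessors below hand back clones of the stored values.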
+impl tensor::HostTensor for MLHostImpl {
+    fn new(
+        &mut self,
+        tensor_dimensions: tensor::TensorDimensions,
+        tensor_type: tensor::TensorType,
+        tensor_data: tensor::TensorData,
+    ) -> Resource<tensor::Tensor> {
+        let tensor = TensorInternalData {
+            tensor_dimensions,
+            tensor_type,
+            tensor_data,
+        };
+        self.tensors
+            .push(tensor)
+            .map(Resource::<tensor::Tensor>::new_own)
+            .map_err(|_| anyhow!("Can't allocate tensor"))
+            .unwrap()
+    }
+    fn dimensions(&mut self, tensor: Resource<tensor::Tensor>) -> Vec<u32> {
+        self.tensors
+            .get(tensor.rep())
+            .ok_or(anyhow!(format!(
+                "Can't find tensor with ID = {}",
+                tensor.rep()
+            )))
+            .map(|t| t.tensor_dimensions.clone())
+            .unwrap()
+    }
+    fn ty(&mut self, tensor: Resource<tensor::Tensor>) -> tensor::TensorType {
+        self.tensors
+            .get(tensor.rep())
+            .ok_or(anyhow!(format!(
+                "Can't find tensor with ID = {}",
+                tensor.rep()
+            )))
+            .map(|t| t.tensor_type)
+            .unwrap()
+    }
+    fn data(&mut self, tensor: Resource<tensor::Tensor>) -> tensor::TensorData {
+        self.tensors
+            .get(tensor.rep())
+            .ok_or(anyhow!(format!(
+                "Can't find tensor with ID = {}",
+                tensor.rep()
+            )))
+            .map(|t| t.tensor_data.clone())
+            .unwrap()
+    }
+    fn drop(&mut self, tensor: Resource<tensor::Tensor>) -> Result<(), anyhow::Error> {
+        self.tensors
+            .remove(tensor.rep())
+            .context(format!("Can't find tensor with ID = {}", tensor.rep()))?;
+        Ok(())
+    }
+}
+
+impl inference::HostGraphExecutionContext for MLHostImpl {
+    fn set_input(
+        &mut self,
+        graph_execution_context: Resource<GraphExecutionContext>,
+        input_name: String,
+        tensor: Resource<tensor::Tensor>,
+    ) -> Result<(), Resource<errors::Error>> {
+        let execution_context = self
+            .executions
+            .get_mut(graph_execution_context.rep())
+            .context(format!(
+                "Can't find graph execution context with ID = {}",
+                graph_execution_context.rep()
+            ))
+            .unwrap();
+
+        let tensor = self
+            .tensors
+            .get(tensor.rep())
+            .context(format!("Can't find tensor with ID = {}", tensor.rep()))
+            .unwrap();
+
+        let index = input_name
+            .parse::<usize>()
+            .context(format!("Can't parse input_name = {input_name} to usize"))
+            .unwrap();
+        let tensor_id = TensorId::Index(index as u32);
+
+        execution_context
+            .0
+            .set_input(&tensor_id, tensor)
+            .map_err(|err| {
+                MLHostImpl::new_error(&mut self.errors, ErrorCode::RuntimeError, err.to_string())
+            })
+    }
+
+    fn compute(
+        &mut self,
+        graph_execution_context: Resource<GraphExecutionContext>,
+    ) -> Result<(), Resource<errors::Error>> {
+        let graph_execution = self
+            .executions
+            .get_mut(graph_execution_context.rep())
+            .ok_or(anyhow!(format!(
+                "Can't find graph execution context with ID = {}",
+                graph_execution_context.rep()
+            )))
+            .unwrap();
+
+        graph_execution.0.compute().map_err(|err| {
+            MLHostImpl::new_error(
+                &mut self.errors,
+                ErrorCode::RuntimeError,
+                format!("Inference error = {:?}", err.to_string()),
+            )
+        })
+    }
+
+    fn get_output(
+        &mut self,
+        graph_execution_context: Resource<GraphExecutionContext>,
+        input_name: String,
+    ) -> Result<Resource<tensor::Tensor>, Resource<errors::Error>> {
+        let graph_execution = self
+            .executions
+            .get_mut(graph_execution_context.rep())
+            .ok_or(anyhow!(format!(
+                "Can't find graph execution context with ID = {}",
+                graph_execution_context.rep()
+            )))
+            .unwrap();
+
+        let index = input_name
+            .parse::<usize>()
+            .context(format!("Can't parse input_name = {input_name} to usize"))
+            .unwrap();
+        let tensor_id = TensorId::Index(index as u32);
+
+        let res = graph_execution.0.get_output(&tensor_id).map_err(|err| {
+            MLHostImpl::new_error(&mut self.errors, ErrorCode::RuntimeError, err.to_string())
+        });
+        match res {
+            Ok(tensor) => {
+                match self
+                    .tensors
+                    .push(tensor)
+                    .map(Resource::<tensor::Tensor>::new_own)
+                {
+                    Ok(t) => Ok(t),
+                    Err(_) => Err(self
+                        .errors
+                        .push(ErrorInternalData {
+                            code: ErrorCode::RuntimeError,
+                            message: "Can't create tensor for get_output".to_string(),
+                        })
+                        .map(Resource::<errors::Error>::new_own)
+                        .map_err(|_| anyhow!("Can't allocate error"))
+                        .unwrap()),
+                }
+            }
+            Err(err) => Err(err),
+        }
+    }
+
+    fn drop(&mut self, execution: Resource<GraphExecutionContext>) -> Result<(), anyhow::Error> {
+        let id = execution.rep();
+        self.executions
+            .remove(id)
+            .context(format!("Can't drop GraphExecutionContext with id = {id}"))?;
+        Ok(())
+    }
+}
+
+impl errors::Host for MLHostImpl {}
+
+impl graph::Host for MLHostImpl {
+    fn load(
+        &mut self,
+        builders: Vec<GraphBuilder>,
+        graph_encoding: GraphEncoding,
+        target: ExecutionTarget,
+    ) -> Result<Resource<Graph>, Resource<errors::Error>> {
+        for backend in self.backends.iter_mut() {
+            if backend.encoding() == graph_encoding {
+                match backend.load(builders, target, graph_encoding, None) {
+                    Ok(graph_internal_data) => {
+                        return MLHostImpl::new_graph(
+                            &mut self.graphs,
+                            &mut self.errors,
+                            graph_internal_data,
+                        )
+                        .unwrap();
+                    }
+                    Err(err) => {
+                        return Err(MLHostImpl::new_error(
+                            &mut self.errors,
+                            ErrorCode::RuntimeError,
+                            format!("Can't load model error = {:?}", err),
+                        ));
+                    }
+                }
+            }
+        }
+        panic!("[graph::Host] fn load -> graph_encoding = {graph_encoding:?} is not supported")
+    }
+
+    fn load_by_name(
+        &mut self,
+        model_name: String,
+    ) -> Result<Resource<Graph>, Resource<errors::Error>> {
+        let parts: Vec<_> = model_name.split(':').map(|x| x.to_string()).collect();
+        if parts.len() > 1 {
+            if let Some(graph_encoding) = map_string_to_graph_encoding(&parts[0]) {
+                for backend in self.backends.iter_mut() {
+                    if backend.encoding() == graph_encoding {
+                        match backend.load_by_name(model_name.clone()) {
+                            Ok(graph_internal_data) => {
+                                return MLHostImpl::new_graph(
+                                    &mut self.graphs,
+                                    &mut self.errors,
+                                    graph_internal_data,
+                                )
+                                .unwrap();
+                            }
+                            Err(err) => {
+                                return Err(MLHostImpl::new_error(
+                                    &mut self.errors,
+                                    ErrorCode::RuntimeError,
+                                    format!("Can't load model '{model_name}' error = {err:?}"),
+                                ));
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        panic!("[graph::Host] fn load_by_name -> model not supported")
+    }
+}
+
+impl inference::Host for MLHostImpl {}
+impl tensor::Host for MLHostImpl {}
+
+fn map_string_to_graph_encoding(target: &str) -> Option<GraphEncoding> {
+    match target {
+        "openvino" => Some(GraphEncoding::Openvino),
+        _ => None,
+    }
+}
diff --git a/crates/core/tests/test_host_components/mod.rs b/crates/core/tests/test_host_components/mod.rs
index 62a1ead4cb..b41a741525 100644
--- a/crates/core/tests/test_host_components/mod.rs
+++ b/crates/core/tests/test_host_components/mod.rs
@@ -1,2 +1,9 @@
-pub mod ml;
+pub mod ml_backend;
+pub mod ml_wit {
+    wasmtime::component::bindgen!("ml" in "tests/core-wasi-test/wit");
+}
+
+pub mod ml_host_component;
+pub mod ml_host_impl;
+
 pub mod multiplier;
diff --git a/crates/core/tests/test_host_components/multiplier.rs b/crates/core/tests/test_host_components/multiplier.rs
index b744c74df3..fa9160897f 100644
--- a/crates/core/tests/test_host_components/multiplier.rs
+++ b/crates/core/tests/test_host_components/multiplier.rs
@@ -4,6 +4,7 @@ use spin_core::HostComponent;
 #[derive(Clone)]
 pub struct MultiplierHostComponent;
 
+#[allow(clippy::module_inception)]
 mod multiplier {
     wasmtime::component::bindgen!("multiplier" in "tests/core-wasi-test/wit");
 }
diff --git 
a/crates/ml/Cargo.toml b/crates/ml/Cargo.toml index fd1d29e84e..a984e79a67 100644 --- a/crates/ml/Cargo.toml +++ b/crates/ml/Cargo.toml @@ -14,16 +14,15 @@ spin-core = { path = "../core" } spin-world = { path = "../world" } table = { path = "../table" } -openvino = { version = "0.7.2", features = ["runtime-linking",] } - -ort = { version = "2.0.0-rc.2", default-features = false, features = [ - "copy-dylibs", - "download-binaries", - "load-dynamic", -] } +openvino = { version = "0.7.2", features = ["runtime-linking",], optional = true } curl = { version = "^0.4", features = ["rustls"] } sha1 = "^0.10" hex-literal = "^0.4" tracing = { workspace = true } + +[features] +default = ["openvino"] +openvino = ["dep:openvino"] +has_gpu = [] diff --git a/crates/ml/src/backend/mod.rs b/crates/ml/src/backend/mod.rs index 4422ce94a8..cc6e8f16fb 100644 --- a/crates/ml/src/backend/mod.rs +++ b/crates/ml/src/backend/mod.rs @@ -1,14 +1,11 @@ -//#[cfg(feature = "openvino")] -pub mod openvino; -//pub mod onnx; - use spin_world::v2 as ml_wit; use ml_wit::graph::{ExecutionTarget, GraphBuilder, GraphEncoding}; -//use ml_wit::inference::GraphExecutionContext; -use ml_wit::tensor; -use crate::host_impl::{ExecutionContext, GraphInternalData, TensorInternalData}; +use crate::ml_host_impl::{ExecutionContext, GraphInternalData, TensorInternalData}; + +#[cfg(feature = "openvino")] +pub mod openvino; /// A [Backend] contains the necessary state to load [Graph]s. pub trait BackendInner: Send + Sync { @@ -22,13 +19,6 @@ pub trait BackendInner: Send + Sync { ) -> Result; fn load_by_name(&mut self, model_name: String) -> Result; - //fn as_dir_loadable<'a>(&'a mut self) -> Option<&'a mut dyn BackendFromDir>; - /* - fn init_execution_context( - &mut self, - graph: &GraphInternalData, - ) -> Result; - */ } /// A [BackendGraph] can create [BackendExecutionContext]s; this is the backing diff --git a/crates/ml/src/backend/onnx.rs b/crates/ml/src/backend/onnx.rs deleted file mode 100644 index a9c5ba2eda..0000000000 --- a/crates/ml/src/backend/onnx.rs +++ /dev/null @@ -1,336 +0,0 @@ -use super::{BackendError, BackendExecutionContext, BackendFromDir, BackendGraph, BackendInner}; -use crate::backend::{read, Id}; -use crate::wit::types::{ExecutionTarget, GraphEncoding, Tensor, TensorType}; -use crate::{ExecutionContext, Graph}; -use anyhow::Context; -use ort::{inputs, GraphOptimizationLevel, Session}; -use std::path::Path; -use std::sync::{Arc, Mutex}; - -#[derive(Default)] -pub struct OnnxBackend(); -unsafe impl Send for OnnxBackend {} -unsafe impl Sync for OnnxBackend {} - -impl BackendInner for OnnxBackend { - fn encoding(&self) -> GraphEncoding { - GraphEncoding::Onnx - } - - fn load(&mut self, builders: &[&[u8]], target: ExecutionTarget) -> Result { - if builders.len() != 1 { - return Err(BackendError::InvalidNumberOfBuilders(1, builders.len()).into()); - } - - let session = Session::builder()? - .with_optimization_level(GraphOptimizationLevel::Level3)? 
- .commit_from_memory(builders[0])?; - - let box_: Box = - Box::new(OnnxGraph(Arc::new(Mutex::new(session)), target)); - Ok(box_.into()) - } - - fn as_dir_loadable<'a>(&'a mut self) -> Option<&'a mut dyn BackendFromDir> { - Some(self) - } -} - -impl BackendFromDir for OnnxBackend { - fn load_from_dir( - &mut self, - path: &Path, - target: ExecutionTarget, - ) -> Result { - let model = read(&path.join("model.onnx"))?; - self.load(&[&model], target) - } -} - -struct OnnxGraph(Arc>, #[allow(dead_code)] ExecutionTarget); -unsafe impl Send for OnnxGraph {} -unsafe impl Sync for OnnxGraph {} - -impl BackendGraph for OnnxGraph { - fn init_execution_context(&self) -> Result { - let session = self.0.lock().unwrap(); - // We need to hold on to the names of the inputs in order for - // `set_input` to work with both indexes and names. Having the - // dimensions and type around is useful for validation but could be - // retrieved from the session. - let mut inputs = vec![]; - for input in &session.inputs { - let shape = Shape::from_onnx_input(input)?; - inputs.push(TensorSlot { - shape, - tensor: None, - }); - } - // We need to keep track of the output shapes since they are used for - // creating the output tensor. - let mut outputs = vec![]; - for output in &session.outputs { - let shape = Shape::from_onnx_output(output)?; - outputs.push(TensorSlot { - shape, - tensor: None, - }); - } - let box_: Box = Box::new(OnnxExecutionContext { - session: self.0.clone(), - inputs, - outputs, - }); - Ok(box_.into()) - } -} - -struct OnnxExecutionContext { - session: Arc>, - inputs: Vec, - outputs: Vec, -} - -unsafe impl Send for OnnxExecutionContext {} -unsafe impl Sync for OnnxExecutionContext {} - -impl OnnxExecutionContext { - /// Helper function for finding the internal index of a tensor by [`Id`]. - fn find(&self, id: Id, list: &[TensorSlot]) -> Result { - let index = match id { - Id::Index(i) => { - let i = i as usize; - if i < list.len() { - i - } else { - return Err(BackendError::BackendAccess(anyhow::anyhow!( - "incorrect tensor index: {i} >= {}", - list.len() - ))); - } - } - Id::Name(n) => list.iter().position(|s| s.shape.name == n).ok_or_else(|| { - BackendError::BackendAccess(anyhow::anyhow!("unknown tensor name: {n}")) - })?, - }; - Ok(index) - } -} - -impl BackendExecutionContext for OnnxExecutionContext { - fn set_input(&mut self, id: Id, tensor: &Tensor) -> Result<(), BackendError> { - let index = self.find(id, &self.inputs)?; - let input = &mut self.inputs[index]; - if let Err(e) = input.shape.matches(tensor) { - return Err(e.into()); - } - // Hold the tensor data on the context until `compute` is called. - input.tensor.replace(tensor.clone()); - Ok(()) - } - - fn compute(&mut self) -> Result<(), BackendError> { - let mut session_inputs: Vec> = vec![]; - for i in &self.inputs { - session_inputs.extend(to_input_value(i)?); - } - let session = self.session.lock().unwrap(); - let session_outputs = session.run(session_inputs.as_slice())?; - for i in 0..self.outputs.len() { - // TODO: fix preexisting gap--this only handles f32 tensors. 
- let raw: (Vec, &[f32]) = session_outputs[i].try_extract_raw_tensor()?; - let f32s = raw.1.to_vec(); - let output = &mut self.outputs[i]; - output.tensor.replace(Tensor { - dimensions: output.shape.dimensions_as_u32()?, - ty: output.shape.ty, - data: f32_vec_to_bytes(f32s), - }); - } - Ok(()) - } - - fn get_output(&mut self, id: Id) -> Result { - let index = self.find(id, &self.outputs)?; - let output = &self.outputs[index]; - if let Some(tensor) = &output.tensor { - Ok(tensor.clone()) - } else { - Err(BackendError::BackendAccess(anyhow::anyhow!( - "missing output tensor: {}; has `compute` been called?", - output.shape.name - ))) - } - } -} - -impl From for BackendError { - fn from(e: ort::Error) -> Self { - BackendError::BackendAccess(e.into()) - } -} - -/// Holds a slot for ONNX session inputs and outputs. -/// -/// TODO: it seems unfortunate that we have to "hold" some extra data per -/// session but in the input case, this is necessary for name-based indexing. -struct TensorSlot { - shape: Shape, - tensor: Option, -} - -/// Describes a tensor in ONNX terms. -struct Shape { - name: String, - dimensions: Vec, - ty: TensorType, -} - -impl Shape { - fn from_onnx_input(input: &ort::Input) -> Result { - let name = input.name.clone(); - let (dimensions, ty) = convert_value_type(&input.input_type)?; - Ok(Self { - name, - dimensions, - ty, - }) - } - - fn from_onnx_output(output: &ort::Output) -> Result { - let name = output.name.clone(); - let (dimensions, ty) = convert_value_type(&output.output_type)?; - Ok(Self { - name, - dimensions, - ty, - }) - } - - fn dimensions_as_u32(&self) -> Result, BackendError> { - self.dimensions - .iter() - .map(|d| if *d == -1 { Ok(1) } else { convert_i64(d) }) - .collect() - } - - fn matches(&self, tensor: &Tensor) -> anyhow::Result<()> { - if self.dimensions.len() != tensor.dimensions.len() { - return Err(anyhow::anyhow!( - "input tensor cardinality does not match model: {:?} != {:?}", - self.dimensions, - tensor.dimensions - )); - } else { - for (&shape_dim, &tensor_dim) in self.dimensions.iter().zip(tensor.dimensions.iter()) { - let tensor_dim = tensor_dim as i64; - if !is_dynamic_dimension(shape_dim) && shape_dim != tensor_dim { - return Err(anyhow::anyhow!( - "input tensor dimensions do not match model: {:?} != {:?}", - self.dimensions, - tensor.dimensions - )); - } - } - } - if self.ty != tensor.ty { - return Err(anyhow::anyhow!( - "input tensor type does not match model: {:?} != {:?}", - self.ty, - tensor.ty - )); - } - Ok(()) - } -} - -fn convert_value_type(vt: &ort::ValueType) -> Result<(Vec, TensorType), BackendError> { - match vt { - ort::ValueType::Tensor { ty, dimensions } => { - let dims = dimensions.clone(); - let ty = (*ty).try_into()?; - Ok((dims, ty)) - } - _ => Err(BackendError::BackendAccess(anyhow::anyhow!( - "unsupported input type: {vt:?}" - ))), - } -} - -fn convert_i64(i: &i64) -> Result { - u32::try_from(*i).map_err(|d| -> BackendError { - anyhow::anyhow!("unable to convert dimension to u32: {d}").into() - }) -} - -impl TryFrom for TensorType { - type Error = BackendError; - fn try_from(ty: ort::TensorElementType) -> Result { - match ty { - ort::TensorElementType::Float32 => Ok(TensorType::Fp32), - ort::TensorElementType::Float64 => Ok(TensorType::Fp64), - ort::TensorElementType::Uint8 => Ok(TensorType::U8), - ort::TensorElementType::Int32 => Ok(TensorType::I32), - ort::TensorElementType::Int64 => Ok(TensorType::I64), - _ => Err(BackendError::BackendAccess(anyhow::anyhow!( - "unsupported tensor type: {ty:?}" - ))), - } - } -} - -fn 
to_input_value(slot: &TensorSlot) -> Result<[ort::SessionInputValue<'_>; 1], BackendError> { - match &slot.tensor { - Some(tensor) => match tensor.ty { - TensorType::Fp32 => { - let data = bytes_to_f32_vec(tensor.data.to_vec()); - let dimensions = tensor - .dimensions - .iter() - .map(|d| *d as i64) // TODO: fewer conversions - .collect::>(); - Ok(inputs![(dimensions, Arc::new(data.into_boxed_slice()))] - .context("failed to create ONNX session input")?) - } - _ => { - unimplemented!("{:?} not supported by ONNX", tensor.ty); - } - }, - None => { - return Err(BackendError::BackendAccess(anyhow::anyhow!( - "missing input tensor: {}", - slot.shape.name - ))); - } - } -} - -pub fn f32_vec_to_bytes(data: Vec) -> Vec { - let chunks: Vec<[u8; 4]> = data.into_iter().map(|f| f.to_le_bytes()).collect(); - let result: Vec = chunks.iter().flatten().copied().collect(); - result -} - -pub fn bytes_to_f32_vec(data: Vec) -> Vec { - let chunks: Vec<&[u8]> = data.chunks(4).collect(); - let v: Vec = chunks - .into_iter() - .map(|c| f32::from_le_bytes(c.try_into().unwrap())) - .collect(); - - v.into_iter().collect() -} - -/// Returns whether the dimension is dynamic. -/// -/// ONNX uses [dimensional variables] (i.e., name strings) to indicate that the -/// value of a tensor dimension is user-defined, not fixed by the model. This is -/// useful for batching up several inference requests, e.g. When `ort` returns a -/// dimension of this kind, though, it uses `-1` to indicate that the dimension -/// is dynamic. -/// -/// [dimensional variables]: -/// https://onnx.ai/onnx/repo-docs/IR.html#static-tensor-shapes -fn is_dynamic_dimension(d: i64) -> bool { - d == -1 -} \ No newline at end of file diff --git a/crates/ml/src/backend/openvino.rs b/crates/ml/src/backend/openvino.rs index 2ade6eb0f9..6d420e4032 100644 --- a/crates/ml/src/backend/openvino.rs +++ b/crates/ml/src/backend/openvino.rs @@ -1,9 +1,9 @@ use spin_world::v2 as ml_wit; use ml_wit::graph::{ExecutionTarget, GraphBuilder, GraphEncoding}; +use ml_wit::tensor; use ml_wit::tensor::TensorType; -use crate::backend::tensor; use crate::backend::BackendGraph; use super::{BackendExecutionContext, BackendInner, TensorId}; @@ -13,7 +13,7 @@ use openvino::{DeviceType, ElementType, Shape, Tensor as OvTensor}; use std::path::PathBuf; -use crate::host_impl::{ExecutionContext, GraphInternalData, TensorInternalData}; +use crate::ml_host_impl::{ExecutionContext, GraphInternalData, TensorInternalData}; use anyhow::anyhow; use std::sync::{Arc, Mutex}; diff --git a/crates/ml/src/lib.rs b/crates/ml/src/lib.rs index 6e986ef258..2492b99096 100644 --- a/crates/ml/src/lib.rs +++ b/crates/ml/src/lib.rs @@ -1,4 +1,4 @@ pub mod backend; -pub mod host_component; -mod host_impl; +pub mod ml_host_component; +pub mod ml_host_impl; pub mod imagenet_download; diff --git a/crates/ml/src/host_component.rs b/crates/ml/src/ml_host_component.rs similarity index 71% rename from crates/ml/src/host_component.rs rename to crates/ml/src/ml_host_component.rs index 6304dae653..58a1b2b5d3 100644 --- a/crates/ml/src/host_component.rs +++ b/crates/ml/src/ml_host_component.rs @@ -1,12 +1,15 @@ use std::path::PathBuf; -use spin_app::DynamicHostComponent; -use spin_core::HostComponent; use spin_world::v2 as ml_wit; +use crate::{backend::BackendInner, ml_host_impl::MLHostImpl}; + +use spin_app::{AppComponent, DynamicHostComponent}; +use spin_core::HostComponent; + +#[cfg(feature = "openvino")] use crate::backend::openvino::OpenvinoBackend; -use crate::{backend::BackendInner, host_impl::MLHostImpl}; 
-//#[derive(Clone)]
+
 pub struct MLHostComponent {
     pub state_dir: Option<PathBuf>,
 }
@@ -25,13 +28,13 @@ impl HostComponent for MLHostComponent {
     }
 
     fn build_data(&self) -> Self::Data {
-        let mut backends: Vec<Box<dyn BackendInner>> = vec![];
-        if let Ok(openvino) = openvino::Core::new() {
-            backends.push(Box::new(OpenvinoBackend {
-                openvino,
+        let backends: Vec<Box<dyn BackendInner>> = vec![
+            #[cfg(feature = "openvino")]
+            Box::new(OpenvinoBackend {
+                openvino: openvino::Core::new().unwrap(),
                 state_dir: self.state_dir.clone(),
-            }));
-        }
+            }),
+        ];
 
         MLHostImpl {
             state_dir: self.state_dir.clone(),
@@ -42,11 +45,7 @@ impl HostComponent for MLHostComponent {
 }
 
 impl DynamicHostComponent for MLHostComponent {
-    fn update_data(
-        &self,
-        _data: &mut Self::Data,
-        _component: &spin_app::AppComponent,
-    ) -> anyhow::Result<()> {
+    fn update_data(&self, _data: &mut Self::Data, _component: &AppComponent) -> anyhow::Result<()> {
         /*let hosts = component
             .get_metadata(ALLOWED_HOSTS_KEY)?
             .unwrap_or_default();
diff --git a/crates/ml/src/host_impl.rs b/crates/ml/src/ml_host_impl.rs
similarity index 100%
rename from crates/ml/src/host_impl.rs
rename to crates/ml/src/ml_host_impl.rs
diff --git a/crates/templates/src/template.rs b/crates/templates/src/template.rs
index a2204afd21..1175f7b9fb 100644
--- a/crates/templates/src/template.rs
+++ b/crates/templates/src/template.rs
@@ -230,6 +230,8 @@ impl Template {
     /// A human-readable description of the template, provided by the
     /// template author, or an empty string if no description was
     /// provided.
+    #[allow(unknown_lints)]
+    #[allow(clippy::manual_unwrap_or_default)]
     pub fn description_or_empty(&self) -> &str {
         match &self.description {
             Some(s) => s,
diff --git a/crates/trigger-http/src/lib.rs b/crates/trigger-http/src/lib.rs
index d0fb7ae7fc..dd134006aa 100644
--- a/crates/trigger-http/src/lib.rs
+++ b/crates/trigger-http/src/lib.rs
@@ -754,8 +754,6 @@ enum NotFoundRouteKind {
 #[cfg(test)]
 mod tests {
-    use anyhow::Result;
-
     use super::*;
 
     #[test]
diff --git a/crates/trigger-redis/src/tests.rs b/crates/trigger-redis/src/tests.rs
index 6e0bb91d8b..02fd993098 100644
--- a/crates/trigger-redis/src/tests.rs
+++ b/crates/trigger-redis/src/tests.rs
@@ -1,7 +1,6 @@
 use super::*;
-use anyhow::Result;
 use redis::{Msg, Value};
-use spin_testing::{tokio, RedisTestConfig};
+use spin_testing::RedisTestConfig;
 
 fn create_trigger_event(channel: &str, payload: &str) -> redis::Msg {
     Msg::from_value(&redis::Value::Bulk(vec![
diff --git a/crates/trigger/src/lib.rs b/crates/trigger/src/lib.rs
index 3e883bc46a..ab11a45953 100644
--- a/crates/trigger/src/lib.rs
+++ b/crates/trigger/src/lib.rs
@@ -208,7 +208,7 @@ impl TriggerExecutorBuilder {
             )?;
             self.loader.add_dynamic_host_component(
                 &mut builder,
-                spin_ml::host_component::MLHostComponent {
+                spin_ml::ml_host_component::MLHostComponent {
                     state_dir: runtime_config.state_dir(),
                 },
             )?;
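Note on the `build_data` change above: placing `#[cfg(feature = "openvino")]` on the vec-literal element removes the entry at compile time when the feature is off, replacing the old runtime `Core::new()` probe. A minimal sketch of the pattern with a hypothetical `Backend` trait; the `openvino` feature is assumed to be declared in Cargo.toml:

trait Backend {
    fn name(&self) -> &'static str;
}

struct OpenvinoStub;
impl Backend for OpenvinoStub {
    fn name(&self) -> &'static str {
        "openvino"
    }
}

fn backends() -> Vec<Box<dyn Backend>> {
    // With the feature disabled, the attribute strips the element and this
    // compiles to an empty vec; no runtime probing is involved.
    vec![
        #[cfg(feature = "openvino")]
        Box::new(OpenvinoStub),
    ]
}

fn main() {
    for b in backends() {
        println!("registered backend: {}", b.name());
    }
}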
diff --git a/examples/http-rust-imagenet/src/file_server.rs b/examples/http-rust-imagenet/src/file_server.rs
new file mode 100644
index 0000000000..5bcd10a151
--- /dev/null
+++ b/examples/http-rust-imagenet/src/file_server.rs
@@ -0,0 +1,638 @@
+use anyhow::{anyhow, Context, Result};
+use futures::SinkExt;
+use http::{
+    header::{ACCEPT_ENCODING, CACHE_CONTROL, CONTENT_ENCODING, CONTENT_TYPE, ETAG, IF_NONE_MATCH},
+    HeaderName, StatusCode, Uri,
+};
+use spin_sdk::http::{Fields, IncomingRequest, OutgoingResponse, ResponseOutparam};
+use std::{
+    cmp::Ordering,
+    fmt,
+    fmt::Error,
+    fs::File,
+    io::{Cursor, Read},
+    path::PathBuf,
+    str,
+    str::FromStr,
+};
+
+/// The default value for the cache control header.
+const CACHE_CONTROL_DEFAULT_VALUE: &str = "max-age=60";
+/// Environment variable for the cache configuration.
+const CACHE_CONTROL_ENV: &str = "CACHE_CONTROL";
+/// Brotli compression level 1-11.
+///
+/// 5-6 is considered the balance between compression time and
+/// resulting size. 3 is faster, but doesn't compress as much.
+const BROTLI_LEVEL: u32 = 3;
+/// Brotli content encoding identifier.
+const BROTLI_ENCODING: &str = "br";
+/// Gzip content encoding identifier.
+const GZIP_ENCODING: &str = "gzip";
+/// Deflate content encoding identifier.
+const DEFLATE_ENCODING: &str = "deflate";
+/// The path info header.
+const PATH_INFO_HEADER: &str = "spin-path-info";
+/// The component route header.
+const COMPONENT_ROUTE_HEADER: &str = "spin-component-route";
+/// Environment variable for the fallback path.
+const FALLBACK_PATH_ENV: &str = "FALLBACK_PATH";
+/// Environment variable for the custom 404 path.
+const CUSTOM_404_PATH_ENV: &str = "CUSTOM_404_PATH";
+/// Directory fallback path (trying to map `/about/` -> `/about/index.html`).
+const DIRECTORY_FALLBACK_PATH: &str = "index.html";
+/// Favicon (ICO) file name.
+const FAVICON_ICO_FILENAME: &str = "favicon.ico";
+/// Favicon (PNG) file name.
+const FAVICON_PNG_FILENAME: &str = "favicon.png";
+
+const BUFFER_SIZE: usize = 64 * 1024;
+const DEFLATE_LEVEL: flate2::Compression = flate2::Compression::fast();
+
+#[derive(PartialEq, Debug)]
+struct ContentEncoding {
+    // We limit expressed encodings to ones that we support
+    encoding: SupportedEncoding,
+    weight: Option<f32>,
+}
+
+impl fmt::Display for ContentEncoding {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", self.encoding)?;
+
+        if let Some(weight) = self.weight {
+            write!(f, ";q={weight}")?;
+        }
+
+        Ok(())
+    }
+}
+
+impl PartialEq<SupportedEncoding> for ContentEncoding {
+    fn eq(&self, other: &SupportedEncoding) -> bool {
+        self.encoding == *other
+    }
+}
+
+impl PartialEq<SupportedEncoding> for &ContentEncoding {
+    fn eq(&self, other: &SupportedEncoding) -> bool {
+        self.encoding == *other
+    }
+}
+
+impl PartialOrd for ContentEncoding {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        let aweight = self.weight.unwrap_or(1.0);
+        let bweight = other.weight.unwrap_or(1.0);
+        match aweight.partial_cmp(&bweight) {
+            Some(Ordering::Equal) => match (self.encoding, other.encoding) {
+                // Always prefer brotli
+                (SupportedEncoding::Brotli, _) => Some(Ordering::Greater),
+                (_, SupportedEncoding::Brotli) => Some(Ordering::Less),
+                // Otherwise prefer the more specific option
+                (SupportedEncoding::None, _) => Some(Ordering::Less),
+                (_, SupportedEncoding::None) => Some(Ordering::Greater),
+                // Everything else is roughly equal
+                (_, _) => Some(Ordering::Equal),
+            },
+            v => v,
+        }
+    }
+}
+
+impl FromStr for ContentEncoding {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> Result<Self> {
+        let mut parts = s.split(';');
+        let encoding = parts.next().unwrap().trim();
+        let encoding =
+            SupportedEncoding::from_str(encoding).context("failed to parse encoding type")?;
+        let Some(weight) = parts
+            .next()
+            .map(|s| s.trim())
+            .and_then(|s| s.strip_prefix("q="))
+        else {
+            return Ok(ContentEncoding {
+                encoding,
+                weight: None,
+            });
+        };
+
+        let mut weight: f32 = weight
+            .trim()
+            .parse()
+            .context("failed to parse encoding weight")?;
+        weight = weight.clamp(0.0, 1.0);
+
+        Ok(ContentEncoding {
+            encoding,
+            weight: Some(weight),
+        })
+    }
+}
+
+/// Common Content Encodings
+#[derive(Debug, Eq, PartialEq, Copy, Clone)]
+pub enum SupportedEncoding {
+    Brotli,
+    Deflate,
+    Gzip,
+    None,
+}
+
+impl fmt::Display for SupportedEncoding {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let header_content = match self {
+            Self::Brotli => BROTLI_ENCODING,
+            Self::Deflate => DEFLATE_ENCODING,
+            Self::Gzip => GZIP_ENCODING,
+            Self::None => "",
+        };
+
+        write!(f, "{}", header_content)?;
+        Ok(())
+    }
+}
+
+impl FromStr for SupportedEncoding {
+    type Err = Error;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s.trim().to_lowercase().as_str() {
+            BROTLI_ENCODING => Ok(Self::Brotli),
+            DEFLATE_ENCODING => Ok(Self::Deflate),
+            GZIP_ENCODING => Ok(Self::Gzip),
+            _ => Ok(Self::None),
+        }
+    }
+}
+
+impl SupportedEncoding {
+    /// Return the best SupportedEncoding
+    fn best_encoding(headers: &[(String, Vec<u8>)]) -> Self {
+        let mut accepted_encodings: Vec<ContentEncoding> = headers
+            .iter()
+            .filter(|(k, _)| HeaderName::from_bytes(k.as_bytes()).ok() == Some(ACCEPT_ENCODING))
+            .flat_map(|(_, v)| {
+                str::from_utf8(v).ok().into_iter().flat_map(|v| {
+                    v.split(',').filter_map(|v| {
+                        let e = ContentEncoding::from_str(v).ok()?;
+                        // Filter out "None" values to ensure some compression is
+                        // preferred. This is mostly to be defensive to types we don't
+                        // understand as we only parse encodings we support.
+                        // It's probably subpar if somebody actually _doesn't_ want
+                        // compression but supports it anyway.
+                        (e.encoding != SupportedEncoding::None).then_some(e)
+                    })
+                })
+            })
+            .collect();
+
+        accepted_encodings.sort_by(|a, b| b.partial_cmp(a).unwrap_or(Ordering::Equal));
+
+        accepted_encodings
+            .first()
+            .map(|v| v.encoding)
+            .unwrap_or(SupportedEncoding::None)
+    }
+}
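To make the ordering above concrete: equal q-weights fall through to the tie-breakers in `PartialOrd for ContentEncoding`, so brotli wins a tie but loses to an explicitly higher-weighted gzip. Two illustrative cases, written as a test that assumes the types above are in scope:

#[cfg(test)]
mod encoding_order_examples {
    use super::*;
    use http::header::ACCEPT_ENCODING;

    #[test]
    fn brotli_wins_ties_but_respects_weights() {
        // Equal weights: the tie-breaker prefers brotli.
        let tie = SupportedEncoding::best_encoding(&[(
            ACCEPT_ENCODING.to_string(),
            b"gzip;q=0.8, br;q=0.8".to_vec(),
        )]);
        assert_eq!(tie, SupportedEncoding::Brotli);

        // An explicitly low q pushes brotli below gzip.
        let weighted = SupportedEncoding::best_encoding(&[(
            ACCEPT_ENCODING.to_string(),
            b"gzip;q=0.9, br;q=0.1".to_vec(),
        )]);
        assert_eq!(weighted, SupportedEncoding::Gzip);
    }
}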
+
+//#[spin_sdk::http_component]
+async fn handle_request(req: IncomingRequest, res_out: ResponseOutparam) {
+    let headers = req.headers().entries();
+    let enc = SupportedEncoding::best_encoding(&headers);
+    let mut path = headers
+        .iter()
+        .find_map(|(k, v)| (k.to_lowercase() == PATH_INFO_HEADER).then_some(v))
+        .expect("PATH_INFO header must be set by the Spin runtime");
+
+    let component_route = headers
+        .iter()
+        .find_map(|(k, v)| (k.to_lowercase() == COMPONENT_ROUTE_HEADER).then_some(v))
+        .expect("COMPONENT_ROUTE header must be set by the Spin runtime");
+
+    let uri = req
+        .uri()
+        .parse::<Uri>()
+        .expect("URI is invalid")
+        .path()
+        .as_bytes()
+        .to_vec();
+    if &uri == component_route && path.is_empty() {
+        path = &uri;
+    }
+
+    let if_none_match = headers
+        .iter()
+        .find_map(|(k, v)| {
+            (HeaderName::from_bytes(k.as_bytes()).ok()? == IF_NONE_MATCH).then_some(v.as_slice())
+        })
+        .unwrap_or(b"");
+    match FileServer::make_response(path, enc, if_none_match) {
+        Ok((status, headers, reader)) => {
+            let fields = Fields::new(&headers);
+            let res = OutgoingResponse::new(status.as_u16(), &fields);
+            let mut body = res.take_body();
+            res_out.set(res);
+            if let Some(mut reader) = reader {
+                let mut buffer = vec![0_u8; BUFFER_SIZE];
+                loop {
+                    match reader.read(&mut buffer) {
+                        Ok(0) => break,
+                        Ok(count) => {
+                            if let Err(e) = body.send(buffer[..count].to_vec()).await {
+                                eprintln!("Error sending body: {e}");
+                                break;
+                            }
+                        }
+                        Err(e) => {
+                            eprintln!("Error reading file: {e}");
+                            break;
+                        }
+                    }
+                }
+            }
+        }
+        Err(e) => {
+            eprintln!("Error building response: {e}");
+            let res = OutgoingResponse::new(500, &Fields::new(&headers));
+            let mut body = res.take_body();
+            res_out.set(res);
+            if let Err(e) = body.send(b"Internal Server Error".to_vec()).await {
+                eprintln!("Error sending body: {e}");
+            }
+        }
+    }
+}
+
+#[derive(Debug, Eq, PartialEq)]
+enum FileServerPath {
+    Physical(PathBuf),
+    Embedded(&'static [u8]),
+    None,
+}
+
+trait IsFavicon {
+    fn is_favicon(&self) -> bool;
+}
+
+impl IsFavicon for PathBuf {
+    fn is_favicon(&self) -> bool {
+        match self.file_name() {
+            Some(s) => s == FAVICON_ICO_FILENAME || s == FAVICON_PNG_FILENAME,
+            None => false,
+        }
+    }
+}
+
+struct FileServer;
+impl FileServer {
+    /// Resolve the requested path and then try to read the file.
+    /// `None` indicates that the file does not exist after attempting fallback paths.
+    fn resolve_and_read(path: &str, encoding: SupportedEncoding) -> Option<Result<Box<dyn Read>>> {
+        let reader = match Self::resolve(path) {
+            FileServerPath::Physical(path) => {
+                Some(Self::read(&path).map(|r| Box::new(r) as Box<dyn Read>))
+            }
+            FileServerPath::Embedded(resource) => {
+                Some(Ok(Box::new(Cursor::new(resource)) as Box<dyn Read>))
+            }
+            FileServerPath::None => None,
+        }?;
+
+        Some(reader.map(|reader| match encoding {
+            SupportedEncoding::Brotli => Box::new(brotli::CompressorReader::new(
+                reader,
+                BUFFER_SIZE,
+                BROTLI_LEVEL,
+                20,
+            )) as Box<dyn Read>,
+            SupportedEncoding::Deflate => {
+                Box::new(flate2::read::DeflateEncoder::new(reader, DEFLATE_LEVEL))
+            }
+            SupportedEncoding::Gzip => {
+                Box::new(flate2::read::GzEncoder::new(reader, DEFLATE_LEVEL))
+            }
+            SupportedEncoding::None => reader,
+        }))
+    }
+
+    /// Resolve the request path to a file path.
+    /// Returns a `FileServerPath` variant.
+    fn resolve(req_path: &str) -> FileServerPath {
+        // fall back to index.html if the path is empty
+        let mut path = if req_path.is_empty() {
+            PathBuf::from(DIRECTORY_FALLBACK_PATH)
+        } else {
+            PathBuf::from(req_path)
+        };
+
+        // if the path is a directory, try to read the fallback file relative to the directory
+        if path.is_dir() {
+            path.push(DIRECTORY_FALLBACK_PATH);
+        }
+
+        // if we still haven't found a file, override with the user-configured fallback path
+        if !path.exists() {
+            if let Ok(fallback_path) = std::env::var(FALLBACK_PATH_ENV) {
+                path = PathBuf::from(fallback_path);
+            }
+        }
+
+        if path.exists() {
+            return FileServerPath::Physical(path);
+        }
+
+        // check whether the user configured a custom 404 path;
+        // if so, and that path exists, return it instead of a plain 404
+        if let Ok(custom_404) = std::env::var(CUSTOM_404_PATH_ENV) {
+            path = PathBuf::from(custom_404);
+        }
+
+        if path.exists() {
+            FileServerPath::Physical(path)
+        } else {
+            FileServerPath::None
+        }
+    }
+
+    /// Open the file at the given path.
+    fn read(path: &PathBuf) -> Result<File> {
+        File::open(path).with_context(|| anyhow!("cannot open {}", path.display()))
+    }
+
+    /// Return the media type of the file based on the path.
+    fn mime(path: &str) -> Option<String> {
+        match path {
+            FAVICON_ICO_FILENAME => mime_guess::from_ext("ico"),
+            FAVICON_PNG_FILENAME => mime_guess::from_ext("png"),
+            _ => mime_guess::from_path(path),
+        }
+        .first()
+        .map(|m| m.to_string())
+    }
+
+    fn make_headers(path: &str, enc: SupportedEncoding, etag: &str) -> Vec<(String, Vec<u8>)> {
+        let mut headers = Vec::new();
+        let cache_control = match std::env::var(CACHE_CONTROL_ENV) {
+            Ok(c) => c,
+            Err(_) => CACHE_CONTROL_DEFAULT_VALUE.to_string(),
+        };
+        headers.push((
+            CACHE_CONTROL.as_str().to_string(),
+            cache_control.into_bytes(),
+        ));
+        headers.push((ETAG.as_str().to_string(), etag.as_bytes().to_vec()));
+
+        match enc {
+            SupportedEncoding::Brotli => headers.push((
+                CONTENT_ENCODING.as_str().to_string(),
+                BROTLI_ENCODING.as_bytes().to_vec(),
+            )),
+            SupportedEncoding::Deflate => headers.push((
+                CONTENT_ENCODING.as_str().to_string(),
+                DEFLATE_ENCODING.as_bytes().to_vec(),
+            )),
+            SupportedEncoding::Gzip => headers.push((
+                CONTENT_ENCODING.as_str().to_string(),
+                GZIP_ENCODING.as_bytes().to_vec(),
+            )),
+            SupportedEncoding::None => {}
+        }
+
+        if let Some(mime) = Self::mime(path) {
+            headers.push((CONTENT_TYPE.as_str().to_string(), mime.into_bytes()));
+        }
+
+        headers
+    }
+
+    #[allow(clippy::type_complexity)]
+    fn make_response(
+        path: &[u8],
+        enc: SupportedEncoding,
+        if_none_match: &[u8],
+    ) -> Result<(StatusCode, Vec<(String, Vec<u8>)>, Option<Box<dyn Read>>)> {
+        let path = str::from_utf8(path)?;
+        let reader = Self::resolve_and_read(path, enc).transpose()?;
+        let etag = Self::make_etag(reader)?;
+        let mut reader = Self::resolve_and_read(path, enc).transpose()?;
+        let mut headers = Self::make_headers(path, enc, &etag);
+
+        let status = if reader.is_some() {
+            if etag.as_bytes() == if_none_match {
+                reader = None;
+                StatusCode::NOT_MODIFIED
+            } else {
+                StatusCode::OK
+            }
+        } else {
+            reader = Some(Box::new(Cursor::new(b"Not Found")));
+            headers = Vec::new();
+            StatusCode::NOT_FOUND
+        };
+
+        Ok((status, headers, reader))
+    }
+
+    fn make_etag(body: Option<Box<dyn Read>>) -> Result<String> {
+        use sha2::Digest;
+        let mut hasher = sha2::Sha256::new();
+        if let Some(mut reader) = body {
+            let mut buffer = vec![0_u8; BUFFER_SIZE];
+            loop {
+                match reader.read(&mut buffer)? {
+                    0 => break,
+                    count => {
+                        hasher.update(&buffer[..count]);
+                    }
+                }
+            }
+        }
+        Ok(hex::encode(hasher.finalize()))
+    }
+}
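The ETag scheme above is simply the hex-encoded SHA-256 of the (already compressed) body, which is what lets `make_response` answer a matching `If-None-Match` with a bodyless 304. A standalone sketch of the same scheme using the `sha2` and `hex` crates:

use sha2::Digest;
use std::io::Read;

// Hash any reader in fixed-size chunks and hex-encode the digest,
// mirroring make_etag above.
fn etag_of(mut reader: impl Read) -> std::io::Result<String> {
    let mut hasher = sha2::Sha256::new();
    let mut buf = [0_u8; 8192];
    loop {
        let n = reader.read(&mut buf)?;
        if n == 0 {
            break;
        }
        hasher.update(&buf[..n]);
    }
    Ok(hex::encode(hasher.finalize()))
}

fn main() -> std::io::Result<()> {
    let etag = etag_of(&b"hello"[..])?;
    // A client that echoes this value back in If-None-Match gets a 304.
    println!("etag = {etag}");
    Ok(())
}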
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use http::header::ACCEPT_ENCODING;
+    use scopeguard::defer;
+    use std::{fs, path::Path, sync::Mutex};
+
+    static TEST_ENV_MUTEX: Mutex<()> = Mutex::new(());
+
+    #[test]
+    fn test_best_encoding_none() {
+        let enc = SupportedEncoding::best_encoding(&[]);
+        assert_eq!(enc, SupportedEncoding::None);
+    }
+
+    #[test]
+    fn test_best_encoding_with_unknown() {
+        let enc = SupportedEncoding::best_encoding(&[(
+            ACCEPT_ENCODING.to_string(),
+            b"some-weird-encoding".to_vec(),
+        )]);
+        assert_eq!(enc, SupportedEncoding::None);
+    }
+
+    #[test]
+    fn test_best_encoding_with_weights() {
+        let enc = SupportedEncoding::best_encoding(&[(
+            ACCEPT_ENCODING.to_string(),
+            b"gzip;br;q=0.1".to_vec(),
+        )]);
+        assert_eq!(enc, SupportedEncoding::Gzip);
+    }
+
+    #[test]
+    fn test_best_encoding_with_multiple_headers() {
+        let enc = SupportedEncoding::best_encoding(&[
+            (ACCEPT_ENCODING.to_string(), b"gzip".to_vec()),
+            (ACCEPT_ENCODING.to_string(), b"br".to_vec()),
+        ]);
+        assert_eq!(enc, SupportedEncoding::Brotli);
+    }
+
+    #[test]
+    fn test_best_encoding_with_gzip() {
+        let enc =
+            SupportedEncoding::best_encoding(&[(ACCEPT_ENCODING.to_string(), b"gzip".to_vec())]);
+        assert_eq!(enc, SupportedEncoding::Gzip);
+    }
+
+    #[test]
+    fn test_best_encoding_with_deflate() {
+        let enc =
+            SupportedEncoding::best_encoding(&[(ACCEPT_ENCODING.to_string(), b"deflate".to_vec())]);
+        assert_eq!(enc, SupportedEncoding::Deflate);
+    }
+
+    #[test]
+    fn test_best_encoding_with_br() {
+        let enc =
+            SupportedEncoding::best_encoding(&[(ACCEPT_ENCODING.to_string(), b"gzip,br".to_vec())]);
+        assert_eq!(enc, SupportedEncoding::Brotli);
+    }
+
+    #[test]
+    fn test_serve_file_found() {
+        let (status, ..) =
+            FileServer::make_response(b"./hello-test.txt", SupportedEncoding::None, b"").unwrap();
+        assert_eq!(status, StatusCode::OK);
+    }
+
+    #[test]
+    fn test_serve_with_etag() {
+        let (status, _, reader) = FileServer::make_response(
+            b"./hello-test.txt",
+            SupportedEncoding::None,
+            b"4dca0fd5f424a31b03ab807cbae77eb32bf2d089eed1cee154b3afed458de0dc",
+        )
+        .unwrap();
+        assert_eq!(status, StatusCode::NOT_MODIFIED);
+        assert!(reader.is_none());
+    }
+
+    #[test]
+    fn test_serve_file_not_found() {
+        let (status, _, reader) =
+            FileServer::make_response(b"non-existent-file", SupportedEncoding::None, b"").unwrap();
+        assert_eq!(status, StatusCode::NOT_FOUND);
+        let mut actual_body = Vec::new();
+        reader.unwrap().read_to_end(&mut actual_body).unwrap();
+        assert_eq!(actual_body.as_slice(), b"Not Found");
+    }
+
+    #[test]
+    fn test_serve_custom_404() {
+        let _lock = TEST_ENV_MUTEX.lock().unwrap();
+
+        // reuse an existing asset as the custom 404 doc
+        let custom_404_path = "hello-test.txt";
+        let expected_body =
+            fs::read(Path::new(custom_404_path)).expect("Could not read custom 404 file");
+
+        std::env::set_var(CUSTOM_404_PATH_ENV, custom_404_path);
+        defer! {
+            std::env::remove_var(CUSTOM_404_PATH_ENV);
+        }
+
+        let (status, _, reader) =
+            FileServer::make_response(b"non-existent-file", SupportedEncoding::None, b"").unwrap();
+        assert_eq!(status, StatusCode::OK);
+        let mut actual_body = Vec::new();
+        reader.unwrap().read_to_end(&mut actual_body).unwrap();
+        assert_eq!(actual_body, expected_body);
+    }
+
+    #[test]
+    fn test_serve_non_existing_custom_404() {
+        let _lock = TEST_ENV_MUTEX.lock().unwrap();
+
+        // provide an invalid path
+        let custom_404_path = "non-existing-404.html";
+
+        std::env::set_var(CUSTOM_404_PATH_ENV, custom_404_path);
+        defer! {
+            std::env::remove_var(CUSTOM_404_PATH_ENV);
+        }
+
+        let (status, _, reader) =
+            FileServer::make_response(b"non-existent-file", SupportedEncoding::None, b"").unwrap();
+        assert_eq!(status, StatusCode::NOT_FOUND);
+        let mut actual_body = Vec::new();
+        reader.unwrap().read_to_end(&mut actual_body).unwrap();
+        assert_eq!(actual_body.as_slice(), b"Not Found");
+    }
+
+    #[test]
+    fn test_serve_file_not_found_with_fallback_path() {
+        let _lock = TEST_ENV_MUTEX.lock().unwrap();
+
+        // reuse an existing asset as the fallback
+        let fallback_path = "hello-test.txt";
+        let expected_body =
+            fs::read(Path::new(fallback_path)).expect("Could not read fallback file");
+
+        std::env::set_var(FALLBACK_PATH_ENV, fallback_path);
+        defer! {
+            std::env::remove_var(FALLBACK_PATH_ENV);
+        }
+
+        let (status, _, reader) =
+            FileServer::make_response(b"non-existent-file", SupportedEncoding::None, b"").unwrap();
+        assert_eq!(status, StatusCode::OK);
+        let mut actual_body = Vec::new();
+        reader.unwrap().read_to_end(&mut actual_body).unwrap();
+        assert_eq!(actual_body, expected_body);
+    }
+
+    #[test]
+    fn test_serve_index() {
+        // Test against path with trailing slash
+        let (status, ..) = FileServer::make_response(b"./", SupportedEncoding::None, b"").unwrap();
+        assert_eq!(status, StatusCode::OK);
+
+        // Test against empty path
= FileServer::make_response(b"", SupportedEncoding::None, b"").unwrap(); + assert_eq!(status, StatusCode::OK); + } + + #[test] + fn test_serve_fallback_favicon() { + let (status, _, reader) = FileServer::make_response( + FAVICON_PNG_FILENAME.as_bytes(), + SupportedEncoding::None, + b"", + ) + .unwrap(); + assert_eq!(status, StatusCode::OK); + let mut actual_body = Vec::new(); + reader.unwrap().read_to_end(&mut actual_body).unwrap(); + assert_eq!(actual_body, FALLBACK_FAVICON_PNG); + } +} diff --git a/examples/http-rust-imagenet/src/lib.rs b/examples/http-rust-imagenet/src/lib.rs index 62fc550654..89570aed92 100644 --- a/examples/http-rust-imagenet/src/lib.rs +++ b/examples/http-rust-imagenet/src/lib.rs @@ -19,10 +19,14 @@ mod ml { mod imagenet; mod imagenet_classes; +mod tokenizer; + +//use crate::token::LlamaTokenizer; use crate::imagenet::elapsed_to_string; use crate::imagenet::imagenet_infer; use crate::ml::fermyon::spin::graph; +use crate::tokenizer::LlamaTokenizer; fn parse_content_type(headers: &HeaderMap) -> Option { @@ -207,6 +211,11 @@ async fn imagenet_demo_handler(req: http::Request>) -> anyhow::Result { + let html_body = llama_handler(req)?; + let response = Response::builder().status(200).body(html_body).build(); + return Ok(response); + } "store" => { let contents = store_handler(req)?; let response = Response::builder().status(200).body(contents).build(); @@ -224,4 +233,95 @@ async fn imagenet_demo_handler(req: http::Request>) -> anyhow::Result>) -> anyhow::Result { + let res = match req.method() { + &Method::POST => { + let (parts, body) = req.into_parts(); + let x = parse_content_type(&parts.headers).unwrap(); + let boundary = x.get_param("boundary").unwrap(); + let mp = Multipart::with_body(&*body, boundary.as_str()); + + let form_data = llama_process_form(mp).unwrap(); + // DOWNLOAD FROM + // https://huggingface.co/facebook/m2m100_418M/resolve/main/sentencepiece.bpe.model + let tokenizer = LlamaTokenizer::new("llama/sentencepiece.bpe.model").unwrap(); + let vocab_size = tokenizer.vocab_size(false); + let token_ids = tokenizer.encode(&form_data.promt, true, true); + + + + let x = tokenizer.decode(&token_ids, false); + format!("form data = {form_data:?} vocab_size = {vocab_size} token_ids = {:?} decoded = '{x}'", token_ids) + } + _ => { + "HELLO from Llama handler".to_owned() + } + }; + + Ok(lamma_add_form(format!("
{res}
"))) +} +fn lamma_add_form(mut html_body: String) -> String { + let form = r#" + +
+

+ Enter text to process by llama +

+

+
+ +

+

+ + +

+

+ + +

+

+ +

+
+ "#; + html_body.push_str(form); + html_body +} + +#[derive(Debug)] +struct LlamaFormData { + promt: String, + target: String, +} + +fn llama_process_form(mut mp: Multipart<&[u8]>) -> Result { + // FORM DATA + let mut promt = "".to_owned(); + let mut target = "CPU".to_string(); + + while let Some(mut field) = mp.read_entry().unwrap() { + match field.headers.name.as_ref().to_owned().as_str() { + "promt" => { + target = "".to_string(); + let _bytes_read = field.data.read_to_string(&mut promt).unwrap(); + } + "target" => { + target = "".to_string(); + let _bytes_read = field.data.read_to_string(&mut target).unwrap(); + } + _ => {} + } + } + + + Ok(LlamaFormData { + promt, + target, + }) +} \ No newline at end of file diff --git a/examples/http-rust-imagenet/src/tokenizer.rs b/examples/http-rust-imagenet/src/tokenizer.rs new file mode 100644 index 0000000000..507d78d9da --- /dev/null +++ b/examples/http-rust-imagenet/src/tokenizer.rs @@ -0,0 +1,66 @@ +use rust_tokenizers::{ + error::TokenizerError, + tokenizer::{SentencePieceBpeTokenizer, Tokenizer, TruncationStrategy}, + vocab::Vocab, +}; +use std::result; + +const BOS_TOKEN_ID: i64 = 1; +const EOS_TOKEN_ID: i64 = 2; + +pub type Result = result::Result; + +pub struct LlamaTokenizer { + spm: SentencePieceBpeTokenizer, +} + +impl LlamaTokenizer { + pub fn new(tokenizer_path: &str) -> Result { + let lower_case = false; + SentencePieceBpeTokenizer::from_file(tokenizer_path, lower_case).map(|spm| Self { spm }) + } + + pub fn encode(&self, text: &str, include_bos: bool, include_eos: bool) -> Vec { + let pre = if include_bos { + vec![BOS_TOKEN_ID] + } else { + vec![] + }; + + let post = if include_eos { + vec![EOS_TOKEN_ID] + } else { + vec![] + }; + + let token_ids = self + .spm + .encode( + text, + None, + std::usize::MAX, + &TruncationStrategy::LongestFirst, + 0, + ) + .token_ids; + + [pre, token_ids, post] + .into_iter() + .flat_map(|v| v.into_iter()) + .collect() + } + + pub fn decode(&self, tokens: &[i64], skip_special_tokens: bool) -> String { + let clean_spaces = false; + self.spm.decode(tokens, skip_special_tokens, clean_spaces) + } + + pub fn vocab_size(&self, include_special_tokens: bool) -> usize { + let vocab = self.spm.vocab(); + if include_special_tokens { + vocab.values().len() + vocab.special_values().len() + } else { + vocab.values().len() + } + } +} diff --git a/examples/spin-timer/Cargo.lock b/examples/spin-timer/Cargo.lock index 0fda9848b6..4fa1ea45e4 100644 --- a/examples/spin-timer/Cargo.lock +++ b/examples/spin-timer/Cargo.lock @@ -2364,6 +2364,12 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +[[package]] +name = "hex-literal" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" + [[package]] name = "hkdf" version = "0.12.4" @@ -4038,9 +4044,9 @@ dependencies = [ [[package]] name = "openvino" -version = "0.6.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24bd3a7ef39968e6a4f1b1206c1c876f9bd50cf739ccbcd69f8539bbac5dcc7a" +checksum = "aee013796927eec6012a344f10ecdc06bf26de79c626a2395e3f115464907ef6" dependencies = [ "openvino-finder", "openvino-sys", @@ -4049,9 +4055,9 @@ dependencies = [ [[package]] name = "openvino-finder" -version = "0.6.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"05d234d1394a413ea8adaf0c40806b9ad1946be6310b441f688840654a331973" +checksum = "af4c6841df4cd60fef743015f3348f81b6b225bd255ed0c4cab6e8c479e45eaa" dependencies = [ "cfg-if", "log", @@ -4059,9 +4065,9 @@ dependencies = [ [[package]] name = "openvino-sys" -version = "0.6.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c98acf37fc84ad9d7da4dc6c18f0f60ad209b43a6f555be01f9003d0a2a43d" +checksum = "f62fc2bd6882f2300a6b5017eaad292586d70995d333582aabcf1f1121cd147c" dependencies = [ "env_logger", "libloading", @@ -4494,9 +4500,9 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "polling" @@ -6115,6 +6121,23 @@ dependencies = [ "wasm-pkg-common", ] +[[package]] +name = "spin-ml" +version = "2.7.0-pre0" +dependencies = [ + "anyhow", + "curl", + "hex-literal", + "openvino", + "sha1 0.10.6", + "spin-app", + "spin-core", + "spin-world", + "table", + "tokio", + "tracing", +] + [[package]] name = "spin-outbound-networking" version = "2.7.0-pre0" @@ -6233,6 +6256,7 @@ dependencies = [ "spin-llm-remote-http", "spin-loader", "spin-manifest", + "spin-ml", "spin-outbound-networking", "spin-sqlite", "spin-sqlite-inproc", diff --git a/flake.nix b/flake.nix index 8afc13b149..ebbef0e8fc 100644 --- a/flake.nix +++ b/flake.nix @@ -50,6 +50,7 @@ openssl pkg-config rustTarget + onnxruntime ] ++ lib.optionals stdenv.isDarwin [ darwin.apple_sdk.frameworks.Accelerate diff --git a/src/commands/up.rs b/src/commands/up.rs index 87f5d61cf3..607bf114ec 100644 --- a/src/commands/up.rs +++ b/src/commands/up.rs @@ -648,8 +648,6 @@ fn trigger_command_for_resolved_app_source( #[cfg(test)] mod test { - use crate::commands::up::app_source::AppSource; - use super::*; fn repo_path(path: &str) -> String { diff --git a/tests/integration.rs b/tests/integration.rs index d050bada64..b00fba4c9e 100644 --- a/tests/integration.rs +++ b/tests/integration.rs @@ -1038,7 +1038,7 @@ route = "/..." /// Build an app whose component `workdir` is a subdirectory. #[test] - #[cfg(not(tarpaulin))] + // #[cfg(not(tarpaulin))] fn test_build_command_nested_workdir() -> anyhow::Result<()> { do_test_build_command("tests/testcases/nested-build") }