From 8ad47afda6e084ee2c0d1cab54e5ee981fd686af Mon Sep 17 00:00:00 2001 From: Adithya Krishna Date: Thu, 17 Aug 2023 11:39:21 +0530 Subject: [PATCH 01/11] Resolved Conflicts Signed-off-by: Adithya Krishna --- docs/develop/deploy/cri-runtime/containerd.md | 2 +- docs/embed/c++/intro.md | 97 +++- docs/embed/use-case/mesh/_category_.json | 8 + docs/embed/use-case/mesh/dapr.md | 263 ++++++++++ docs/embed/use-case/mesh/eventmesh.md | 10 + docs/embed/use-case/reactr.md | 376 +++++++++++++++ .../embed/use-case/serverless/_category_.json | 8 + docs/embed/use-case/serverless/aws.md | 266 ++++++++++ docs/embed/use-case/serverless/netlify.md | 189 ++++++++ docs/embed/use-case/serverless/secondstate.md | 18 + docs/embed/use-case/serverless/tencent.md | 11 + docs/embed/use-case/serverless/vercel.md | 191 ++++++++ docs/embed/use-case/wasm-smart-devices.md | 2 +- docs/embed/use-case/web-app.md | 101 ++++ docs/start/wasmedge/comparison.md | 29 ++ docu.js | 453 ++++++++++++++++++ docusaurus.config.js | 19 +- .../develop/deploy/cri-runtime/containerd.md | 2 +- .../current/embed/c++/intro.md | 97 +++- .../embed/use-case/mesh/_category_.json | 8 + .../current/embed/use-case/mesh/dapr.md | 263 ++++++++++ .../current/embed/use-case/mesh/eventmesh.md | 10 + .../current/embed/use-case/reactr.md | 376 +++++++++++++++ .../embed/use-case/serverless/_category_.json | 8 + .../current/embed/use-case/serverless/aws.md | 266 ++++++++++ .../embed/use-case/serverless/netlify.md | 189 ++++++++ .../embed/use-case/serverless/secondstate.md | 18 + .../embed/use-case/serverless/tencent.md | 11 + .../embed/use-case/serverless/vercel.md | 191 ++++++++ .../embed/use-case/wasm-smart-devices.md | 2 +- .../current/embed/use-case/web-app.md | 101 ++++ .../current/start/wasmedge/comparison.md | 29 ++ 32 files changed, 3599 insertions(+), 15 deletions(-) create mode 100644 docs/embed/use-case/mesh/_category_.json create mode 100644 docs/embed/use-case/mesh/dapr.md create mode 100644 
docs/embed/use-case/mesh/eventmesh.md create mode 100644 docs/embed/use-case/reactr.md create mode 100644 docs/embed/use-case/serverless/_category_.json create mode 100644 docs/embed/use-case/serverless/aws.md create mode 100644 docs/embed/use-case/serverless/netlify.md create mode 100644 docs/embed/use-case/serverless/secondstate.md create mode 100644 docs/embed/use-case/serverless/tencent.md create mode 100644 docs/embed/use-case/serverless/vercel.md create mode 100644 docs/embed/use-case/web-app.md create mode 100644 docs/start/wasmedge/comparison.md create mode 100644 docu.js create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/_category_.json create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/_category_.json create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/aws.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/netlify.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/tencent.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/vercel.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/start/wasmedge/comparison.md diff --git a/docs/develop/deploy/cri-runtime/containerd.md b/docs/develop/deploy/cri-runtime/containerd.md index 002cb7985..5019e2d47 100644 --- a/docs/develop/deploy/cri-runtime/containerd.md +++ 
b/docs/develop/deploy/cri-runtime/containerd.md @@ -2,7 +2,7 @@ sidebar_position: 1 --- -# 8.6.1 Deploy with containerd's runwasi +# Deploy with containerd's runwasi :::info diff --git a/docs/embed/c++/intro.md b/docs/embed/c++/intro.md index 8c78bf19e..e6b7af9f5 100644 --- a/docs/embed/c++/intro.md +++ b/docs/embed/c++/intro.md @@ -4,7 +4,96 @@ sidebar_position: 1 # WasmEdge C++ SDK Introduction - -:::info -Work in Progress -::: +The WasmEdge C++ SDK is a collection of headers and libraries that allow you to build and deploy WebAssembly (Wasm) modules for execution on WasmEdge devices. It includes a CMake project and a set of command-line tools that you can use to build and deploy your Wasm modules. + +## Quick Start Guide + +To get started with WasmEdge, follow these steps: + +Install the WasmEdge C/C++ SDK: Download C++ SDK from the WasmEdge [website](https://wasmedge.org/docs/embed/quick-start/install) and follow the instructions to install it on your development machine + +```cpp +#include +#include + +int main(int argc, char** argv) { + /* Create the configure context and add the WASI support. */ + /* This step is not necessary unless you need WASI support. */ + WasmEdge_ConfigureContext* conf_cxt = WasmEdge_ConfigureCreate(); + WasmEdge_ConfigureAddHostRegistration(conf_cxt, WasmEdge_HostRegistration_Wasi); + /* The configure and store context to the VM creation can be NULL. */ + WasmEdge_VMContext* vm_cxt = WasmEdge_VMCreate(conf_cxt, nullptr); + + /* The parameters and returns arrays. */ + WasmEdge_Value params[1] = { WasmEdge_ValueGenI32(40) }; + WasmEdge_Value returns[1]; + /* Function name. */ + WasmEdge_String func_name = WasmEdge_StringCreateByCString("fib"); + /* Run the WASM function from file. 
*/ + WasmEdge_Result res = WasmEdge_VMRunWasmFromFile(vm_cxt, argv[1], func_name, params, 1, returns, 1); + + if (WasmEdge_ResultOK(res)) { + std::cout << "Get result: " << WasmEdge_ValueGetI32(returns[0]) << std::endl; + } else { + std::cout << "Error message: " << WasmEdge_ResultGetMessage(res) << std::endl; + } + + /* Resources deallocations. */ + WasmEdge_VMDelete(vm_cxt); + WasmEdge_ConfigureDelete(conf_cxt); + WasmEdge_StringDelete(func_name); + return 0; +} +``` + +You can use the -I flag to specify the include directories and the -L and -l flags to specify the library directories and library names, respectively. +Then you can compile the code and run: ( the 40th fibonacci number is 102334155) + +```bash +gcc example.cpp -x c++ -I/path/to/wasmedge/include -L/path/to/wasmedge/lib -lwasmedge -o example +``` + +To run the `example` executable that was created in the previous step, you can use the following command + +```bash +./example +``` + +## Quick Start Guide in AOT compiler + +```cpp +#include +#include + +int main(int argc, const char* argv[]) { + // Create the configure context and add the WASI support. + // This step is not necessary unless you need WASI support. + wasmedge_configure_context* conf_cxt = wasmedge_configure_create(); + wasmedge_configure_add_host_registration(conf_cxt, WASMEDGE_HOST_REGISTRATION_WASI); + + // Create the VM context in AOT mode. + wasmedge_vm_context* vm_cxt = wasmedge_vm_create_aot(conf_cxt, NULL); + + // The parameters and returns arrays. + wasmedge_value params[1] = { wasmedge_value_gen_i32(32) }; + wasmedge_value returns[1]; + // Function name. + wasmedge_string func_name = wasmedge_string_create_by_cstring("fib"); + // Run the WASM function from file. 
+ wasmedge_result res = wasmedge_vm_run_wasm_from_file(vm_cxt, argv[1], func_name, params, 1, returns, 1); + + if (wasmedge_result_ok(res)) { + printf("Get result: %d\n", wasmedge_value_get_i32(returns[0])); + } else { + printf("Error message: %s\n", wasmedge_result_get_message(res)); + } + + // Resources deallocations. + wasmedge_vm_delete(vm_cxt); + wasmedge_configure_delete(conf_cxt); + wasmedge_string_delete(func_name); + return 0; +} +``` + +In this example, the wasmedge_vm_create_aot function is used to create a wasmedge_vm_context object in AOT mode, which is then passed as the second argument to the wasmedge_vm_run_wasm_from_file function to execute the Wasm module in AOT mode. \ No newline at end of file diff --git a/docs/embed/use-case/mesh/_category_.json b/docs/embed/use-case/mesh/_category_.json new file mode 100644 index 000000000..75cd42031 --- /dev/null +++ b/docs/embed/use-case/mesh/_category_.json @@ -0,0 +1,8 @@ +{ + "label": "Service mesh and Runtimes", + "position": 8, + "link": { + "type": "generated-index", + "description": "WasmEdge could be a lightweight runtime for sidecar microservices and the API proxy as the Docker alternative." + } +} diff --git a/docs/embed/use-case/mesh/dapr.md b/docs/embed/use-case/mesh/dapr.md new file mode 100644 index 000000000..4adf085d8 --- /dev/null +++ b/docs/embed/use-case/mesh/dapr.md @@ -0,0 +1,263 @@ +--- +sidebar_position: 1 +--- + +# Dapr + +In this article, I will demonstrate how to use WasmEdge as a sidecar application runtime for Dapr. There are two ways to do this: + +* **Standalone WasmEdge** is the **recommended approach** is to write a microservice using [Rust](../../../write_wasm/rust/networking-nonblocking.md) or [JavaScript](../../../write_wasm/js/networking.md), and run it in WasmEdge. The WasmEdge application serves web requests and communicates with the sidecar via sockets using the Dapr API. In this case, we can [run WasmEdge as a managed container in k8s](../../kubernetes/quickstart.md). 
+* Alternatively, Embedded WasmEdge is to create a simple microservice in Rust or Go to listen for web requests and communicate with the Dapr sidecar. It passes the request data to a WasmEdge runtime for processing. The business logic of the microservice is a WebAssembly function created and deployed by an application developer. + +> While the first approach (running the entire microservice in WasmEdge) is much preferred, we are still working on a fully fledged Dapr SDKs for WasmEdge. You can track their progress in GitHub issues -- [Rust](https://github.com/WasmEdge/WasmEdge/issues/1571) and [JavaScript](https://github.com/WasmEdge/WasmEdge/issues/1572). + +## Quick start + +First you need to install [Dapr](https://docs.dapr.io/getting-started/install-dapr-cli) and [WasmEdge](../../../quick_start/install.md). [Go](https://golang.org/doc/install) and [Rust](https://www.rust-lang.org/tools/install) are optional for the standalone WasmEdge approach. However, they are required for the demo app since it showcases both standalone and embedded WasmEdge approaches. + +Fork or clone the demo application from Github. You can use this repo as your own application template. + +```bash +git clone https://github.com/second-state/dapr-wasm +```` + +The demo has 4 Dapr sidecar applications. The [web-port](https://github.com/second-state/dapr-wasm/tree/main/web-port) project provides a public web service for a static HTML page. This is the application’s UI. From the static HTML page, the user can select a microservice to turn an input image into grayscale. All 3 microsoervices below perform the same function. They are just implemented using different approaches. + +* **Standalone WasmEdge approach:** The [image-api-wasi-socket-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-wasi-socket-rs) project provides a standalone WasmEdge sidecar microservice that takes the input image and returns the grayscale image. 
The microservice is written in Rust and compiled into WebAssembly bytecode to run in WasmEdge. +* Embedded WasmEdge approach #1: The [image-api-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) project provides a simple Rust-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. +* Embedded WasmEdge approach #2: The [image-api-go](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) project provides a simple Go-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. + +You can follow the instructions in the [README](https://github.com/second-state/dapr-wasm/blob/main/README.md) to start the sidecar services. Here are commands to build the WebAssembly functions and start the sidecar services. The first set of commands deploy the static web page service and the standalone WasmEdge service written in Rust. It forms a complete application to turn an input image into grayscale. + +```bash +# Build and start the static HTML web page service for the UI and router for sending the uploaded image to the grayscale microservice +cd web-port +go build +./run_web.sh +cd ../ + +# Build the standalone image grayscale web service for WasmEdge +cd image-api-wasi-socket-rs +cargo build --target wasm32-wasi +cd ../ + +# Run the microservice as a Dapr sidecar app +cd image-api-wasi-socket-rs +./run_api_wasi_socket_rs.sh +cd ../ +``` + +The second set of commands create the alternative microservices for the embedded WasmEdge function. 
+ +```bash +# Build the grayscale WebAssembly functions, and deploy them to the sidecar projects +cd functions/grayscale +./build.sh +cd ../../ + +# Build and start the Rust-based microservice for embedding the grayscale WasmEdge function +cd image-api-rs +cargo build --release +./run_api_rs.sh +cd ../ + +# Build and start the Go-based microservice for embedding the grayscale WasmEdge function +cd image-api-go +go build +./run_api_go.sh +cd ../ +``` + +Finally, you should be able to see the web UI in your browser. + +## Recommended: The standalone WasmEdge microservice in Rust + +The [standalone WasmEdge microservice](https://github.com/second-state/dapr-wasm/blob/main/image-api-wasi-socket-rs/src/main.rs) starts a non-blocking TCP server inside WasmEdge. The TCP server passes incoming requests to `handle_client()`, which passes HTTP requests to `handle_http()`, which calls `grayscale()` to process the image data in the request. + +```rust +fn main() -> std::io::Result<()> { + let port = std::env::var("PORT").unwrap_or(9005.to_string()); + println!("new connection at {}", port); + let listener = TcpListener::bind(format!("127.0.0.1:{}", port))?; + loop { + let _ = handle_client(listener.accept()?.0); + } +} + +fn handle_client(mut stream: TcpStream) -> std::io::Result<()> { + ... ... +} + +fn handle_http(req: Request>) -> bytecodec::Result> { + ... ... 
+} + +fn grayscale(image: &[u8]) -> Vec { + let detected = image::guess_format(&image); + let mut buf = vec![]; + if detected.is_err() { + return buf; + } + + let image_format_detected = detected.unwrap(); + let img = image::load_from_memory(&image).unwrap(); + let filtered = img.grayscale(); + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + } + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + } + }; + return buf; +} +``` + +> Work in progress: It will soon interact with the Dapr sidecar through the [WasmEdge Dapr SDK in Rust](https://github.com/WasmEdge/WasmEdge/issues/1571). + +Now, you can build the microservice. It is a simple matter of compiling from Rust to WebAssembly. + +```bash +cd image-api-wasi-socket-rs +cargo build --target wasm32-wasi +``` + +Deploy the WasmEdge microservice in Dapr as follows. + +```bash +dapr run --app-id image-api-wasi-socket-rs \ + --app-protocol http \ + --app-port 9005 \ + --dapr-http-port 3503 \ + --components-path ../config \ + --log-level debug \ + wasmedge ./target/wasm32-wasi/debug/image-api-wasi-socket-rs.wasm +``` + +## Alternative: The embedded WasmEdge microservices + +The embedded WasmEdge approach requires us to create a WebAssembly function for the business logic (image processing) first, and then embed it into simple Dapr microservices. + +### Rust function for image processing + +The [Rust function](https://github.com/second-state/dapr-wasm/blob/main/functions/grayscale/src/lib.rs) is simple. It uses the [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md) macro to makes it easy to call the function from a Go or Rust host embedding the WebAssembly function. It takes and returns base64 encoded image data for the web. 
+ +```rust +#[wasmedge_bindgen] +pub fn grayscale(image_data: String) -> String { + let image_bytes = image_data.split(",").map(|x| x.parse::().unwrap()).collect::>(); + return grayscale::grayscale_internal(&image_bytes); +} +``` + +The Rust function that actually performs the task is as follows. + +```rust +pub fn grayscale_internal(image_data: &[u8]) -> String { + let image_format_detected: ImageFormat = image::guess_format(&image_data).unwrap(); + let img = image::load_from_memory(&image_data).unwrap(); + let filtered = img.grayscale(); + let mut buf = vec![]; + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + } + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + } + }; + let mut base64_encoded = String::new(); + base64::encode_config_buf(&buf, base64::STANDARD, &mut base64_encoded); + return base64_encoded.to_string(); +} +``` + +### The Go host wrapper for microservice + +The [Go-based microservice](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) embeds the above imaging processing function in WasmEdge. The [microservice itself](https://github.com/second-state/dapr-wasm/blob/main/image-api-go/image_api.go) is a web server and utilizes the Dapr Go SDK. + +```go +func main() { + s := daprd.NewService(":9003") + + if err := s.AddServiceInvocationHandler("/api/image", imageHandlerWASI); err != nil { + log.Fatalf("error adding invocation handler: %v", err) + } + + if err := s.Start(); err != nil && err != http.ErrServerClosed { + log.Fatalf("error listening: %v", err) + } +} +``` + +The `imageHandlerWASI()` function [starts a WasmEdge instance](../../../sdk/go/function.md) and calls the image processing (grayscale) function in it via [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md). + +Build and deploy the Go microservice to Dapr as follows. 
+ +```bash +cd image-api-go +go build +dapr run --app-id image-api-go \ + --app-protocol http \ + --app-port 9003 \ + --dapr-http-port 3501 \ + --log-level debug \ + --components-path ../config \ + ./image-api-go +``` + +### The Rust host wrapper for microservice + +The [Rust-based microservice](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) embeds the above imaging processing function in WasmEdge. The [microservice itself](https://github.com/second-state/dapr-wasm/blob/main/image-api-rs/src/main.rs) is a Tokio and Warp based web server. + +```rust +#[tokio::main] +pub async fn run_server(port: u16) { + pretty_env_logger::init(); + let home = warp::get().map(warp::reply); + + let image = warp::post() + .and(warp::path("api")) + .and(warp::path("image")) + .and(warp::body::bytes()) + .map(|bytes: bytes::Bytes| { + let v: Vec = bytes.iter().map(|&x| x).collect(); + let res = image_process_wasmedge_sys(&v); + let _encoded = base64::encode(&res); + Response::builder() + .header("content-type", "image/png") + .body(res) + }); + + let routes = home.or(image); + let routes = routes.with(warp::cors().allow_any_origin()); + + let log = warp::log("dapr_wasm"); + let routes = routes.with(log); + warp::serve(routes).run((Ipv4Addr::UNSPECIFIED, port)).await +} +``` + +The `image_process_wasmedge_sys()` function [starts a WasmEdge instance](../../../sdk/rust/sys_run_host_func.md) and calls the image processing (grayscale) function in it via [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md). + +Build and deploy the Rust microservice to Dapr as follows. + +```bash +cd image-api-rs +cargo build --release +dapr stop image-api-rs + +# Change this to your own path for WasmEdge +export LD_LIBRARY_PATH=/home/coder/.wasmedge/lib64/ + +dapr run --app-id image-api-rs \ + --app-protocol http \ + --app-port 9004 \ + --dapr-http-port 3502 \ + --components-path ../config \ + --log-level debug \ + ./target/release/image-api-rs +``` + +That's it! 
[Let us know](https://github.com/WasmEdge/WasmEdge/discussions) your cool Dapr microservices in WebAssembly! diff --git a/docs/embed/use-case/mesh/eventmesh.md b/docs/embed/use-case/mesh/eventmesh.md new file mode 100644 index 000000000..b39b7b31b --- /dev/null +++ b/docs/embed/use-case/mesh/eventmesh.md @@ -0,0 +1,10 @@ +--- +sidebar_position: 2 +--- + +# Apache Eventmesh + + +:::info +Coming Soon or you can [help out](https://github.com/WasmEdge/WasmEdge/issues/632) +::: \ No newline at end of file diff --git a/docs/embed/use-case/reactr.md b/docs/embed/use-case/reactr.md new file mode 100644 index 000000000..2b31bf21a --- /dev/null +++ b/docs/embed/use-case/reactr.md @@ -0,0 +1,376 @@ +--- +sidebar_position: 7 +--- + +# Reactr + +[Reactr](https://github.com/suborbital/reactr) is a fast, performant function scheduling library written in Go. Reactr is designed to be flexible, with the ability to run embedded in your Go applications and first-class support for WebAssembly. +Taking advantage of Go's superior concurrency capabilities, Reactr can manage and execute hundreds of WebAssembly runtime instances all at once, making a great framework for server-side applications. + +Reactr allows you to run WebAssembly functions in Go, so does the [WasmEdge Go SDK](../../../sdk/go.md). +The unique feature of Reactr is that it provides a rich set of host functions in Go, which support access to networks and databases etc. Reactr then provides Rust (and Swift / AssemblyScript) APIs to call those host functions from within the WebAssembly function. + +In this article, we will show you how to use WasmEdge together with Reactr to take advantage of the best of both worlds. WasmEdge is the [fastest and most extensible WebAssembly runtime](../../../features.md). +It is also the fastest in [Reactr's official test suite](https://github.com/suborbital/reactr/runs/4476074960?check_suite_focus=true). 
+We will show you how to run Rust functions compiled to WebAssembly as well as JavaScript programs in WasmEdge and Reactr. + +> WasmEdge provides [advanced support for JavaScript](../../../write_wasm/js.md) including [mixing Rust with JavaScript](../../../write_wasm/js/rust.md) for improved performance. + +* [Hello world](#hello-world) +* [Database query](#database-query) +* [Embed JavaScript in Go](#embed-javascript-in-go) + +## Prerequisites + +You need have [Rust](https://www.rust-lang.org/tools/install), [Go](https://go.dev/doc/install), and [WasmEdge](../../../quick_start/install.md) installed on your system. +The GCC compiler (installed via the `build-essential` package) is also needed for WasmEdge. + +```bash +sudo apt-get update +sudo apt-get -y upgrade +sudo apt install build-essential + +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh +source $HOME/.cargo/env +rustup target add wasm32-wasi + +curl -OL https://golang.org/dl/go1.17.5.linux-amd64.tar.gz +sudo tar -C /usr/local -xvf go1.17.5.linux-amd64.tar.gz +export PATH=$PATH:/usr/local/go/bin + +wget -qO- https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash +source $HOME/.wasmedge/env +``` + +## Hello world + +A simple `hello world` example for Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/hello). + +### Hello world: Rust function compiled to WebAssembly + +Let's first create [a simple Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/hello-echo/src/lib.rs) to echo hello. +The Rust function `HelloEcho::run()` is as follows. It will be exposed to the Go host application through Reactr. 
+ +```rust +use suborbital::runnable::*; + +struct HelloEcho{} + +impl Runnable for HelloEcho { + fn run(&self, input: Vec) -> Result, RunErr> { + let in_string = String::from_utf8(input).unwrap(); + Ok(format!("hello {}", in_string).as_bytes().to_vec()) + } +} +``` + +Let's build the Rust function into a WebAssembly bytecode file. + +```bash +cd hello-echo +cargo build --target wasm32-wasi --release +cp target/wasm32-wasi/release/hello_echo.wasm .. +cd .. +``` + +### Hello world: Go host application + +Next, lets look into the [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/main.go) that executes the WebAssembly functions. +The `runBundle()` function executes the `run()` function in the `Runnable` struct once. + +```go +func runBundle() { + r := rt.New() + doWasm := r.Register("hello-echo", rwasm.NewRunner("./hello_echo.wasm")) + + res, err := doWasm([]byte("wasmWorker!")).Then() + if err != nil { + fmt.Println(err) + return + } + + fmt.Println(string(res.([]byte))) +} +``` + +The `runGroup()` function executes the Rust-compiled WebAssembly `run()` function multiple times asynchronously in a group, and receives the results as they come in. + +```go +func runGroup() { + r := rt.New() + + doWasm := r.Register("hello-echo", rwasm.NewRunner("./hello_echo.wasm")) + + grp := rt.NewGroup() + for i := 0; i < 100000; i++ { + grp.Add(doWasm([]byte(fmt.Sprintf("world %d", i)))) + } + + if err := grp.Wait(); err != nil { + fmt.Println(err) + } +} +``` + +Finally, let's run the Go host application and see the results printed to the console. + +> You must use the `-tags wasmedge` flag to take advantage of the performance and extended WebAssembly APIs provided by WasmEdge. 
+ +```bash +go mod tidy +go run -tags wasmedge main.go +``` + +## Database query + +In [this example](https://github.com/second-state/wasm-learning/tree/master/reactr/db), we will demonstrate how to use Reactr host functions and APIs to query a PostgreSQL database from your WebAssembly function. + +### Database query: Install and set up a PostgreSQL database + +We will start a PostgreSQL instance through Docker. + +```bash +docker pull postgres +docker run --name reactr-postgres -p 5432:5432 -e POSTGRES_PASSWORD=12345 -d postgres +``` + +Next, let's create a database and populate it with some sample data. + +```bash +$ docker run -it --rm --network host postgres psql -h 127.0.0.1 -U postgres +postgres=# CREATE DATABASE reactr; +postgres=# \c reactr; + +# Create a table: +postgres=# CREATE TABLE users ( + uuid varchar(100) CONSTRAINT firstkey PRIMARY KEY, + email varchar(50) NOT NULL, + created_at date, + state char(1), + identifier integer +); +``` + +Leave this running and start another terminal window to interact with this PostgreSQL server. + +### Database query: Rust function compiled to WebAssembly + +Let's create [a Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/db/rs-db/src/lib.rs) to access the PostgreSQL database. +The Rust function `RsDbtest::run()` is as follows. It will be exposed to the Go host application through Reactr. It uses named queries such as `PGInsertUser` and `PGSelectUserWithUUID` to operate the database. Those queries are defined in the Go host application, and we will see them later. 
+ +```rust +use suborbital::runnable::*; +use suborbital::db; +use suborbital::util; +use suborbital::db::query; +use suborbital::log; +use uuid::Uuid; + +struct RsDbtest{} + +impl Runnable for RsDbtest { + fn run(&self, _: Vec) -> Result, RunErr> { + let uuid = Uuid::new_v4().to_string(); + + let mut args: Vec = Vec::new(); + args.push(query::QueryArg::new("uuid", uuid.as_str())); + args.push(query::QueryArg::new("email", "connor@suborbital.dev")); + + match db::insert("PGInsertUser", args) { + Ok(_) => log::info("insert successful"), + Err(e) => { + return Err(RunErr::new(500, e.message.as_str())) + } + }; + + let mut args2: Vec = Vec::new(); + args2.push(query::QueryArg::new("uuid", uuid.as_str())); + + match db::update("PGUpdateUserWithUUID", args2.clone()) { + Ok(rows) => log::info(format!("update: {}", util::to_string(rows).as_str()).as_str()), + Err(e) => { + return Err(RunErr::new(500, e.message.as_str())) + } + } + + match db::select("PGSelectUserWithUUID", args2.clone()) { + Ok(result) => log::info(format!("select: {}", util::to_string(result).as_str()).as_str()), + Err(e) => { + return Err(RunErr::new(500, e.message.as_str())) + } + } + + match db::delete("PGDeleteUserWithUUID", args2.clone()) { + Ok(rows) => log::info(format!("delete: {}", util::to_string(rows).as_str()).as_str()), + Err(e) => { + return Err(RunErr::new(500, e.message.as_str())) + } + } + + ... ... + } +} +``` + +Let's build the Rust function into a WebAssembly bytecode file. + +```bash +cd rs-db +cargo build --target wasm32-wasi --release +cp target/wasm32-wasi/release/rs_db.wasm .. +cd .. +``` + +### Database query: Go host application + +The [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/db/main.go) first defines the SQL queries and gives each of them a name. +We will then pass those queries to the Reactr runtime as a configuration. 
+ +```go +func main() { + dbConnString, exists := os.LookupEnv("REACTR_DB_CONN_STRING") + if !exists { + fmt.Println("skipping as conn string env var not set") + return + } + + q1 := rcap.Query{ + Type: rcap.QueryTypeInsert, + Name: "PGInsertUser", + VarCount: 2, + Query: ` + INSERT INTO users (uuid, email, created_at, state, identifier) + VALUES ($1, $2, NOW(), 'A', 12345)`, + } + + q2 := rcap.Query{ + Type: rcap.QueryTypeSelect, + Name: "PGSelectUserWithUUID", + VarCount: 1, + Query: ` + SELECT * FROM users + WHERE uuid = $1`, + } + + q3 := rcap.Query{ + Type: rcap.QueryTypeUpdate, + Name: "PGUpdateUserWithUUID", + VarCount: 1, + Query: ` + UPDATE users SET state='B' WHERE uuid = $1`, + } + + q4 := rcap.Query{ + Type: rcap.QueryTypeDelete, + Name: "PGDeleteUserWithUUID", + VarCount: 1, + Query: ` + DELETE FROM users WHERE uuid = $1`, + } + + config := rcap.DefaultConfigWithDB(vlog.Default(), rcap.DBTypePostgres, dbConnString, []rcap.Query{q1, q2, q3, q4}) + + r, err := rt.NewWithConfig(config) + if err != nil { + fmt.Println(err) + return + } + + ... ... +} +``` + +Then, we can run the WebAssembly function from Reactr. + +```go +func main() { + ... ... + + doWasm := r.Register("rs-db", rwasm.NewRunner("./rs_db.wasm")) + + res, err := doWasm(nil).Then() + if err != nil { + fmt.Println(err) + return + } + + fmt.Println(string(res.([]byte))) +} +``` + +Finally, let's run the Go host application and see the results printed to the console. + +> You must use the `-tags wasmedge` flag to take advantage of the performance and extended WebAssembly APIs provided by WasmEdge. 
+ +```bash +export REACTR_DB_CONN_STRING='postgresql://postgres:12345@127.0.0.1:5432/reactr' +go mod tidy +go run -tags wasmedge main.go +``` + +## Embed JavaScript in Go + +As we mentioned, a key feature of the WasmEdge Runtime is its advanced [JavaScript support](../../../write_wasm/js.md), which allows JavaScript programs to run in lightweight, high-performance, safe, multi-language, and [Kubernetes-managed WasmEdge containers](../../kubernetes.md). +A simple example of embedded JavaScript function in Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs). + +### JavaScript example + +The [JavaScript example function](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/hello.js) is very simple. It just returns a string value. + +```javascript +let h = 'hello'; +let w = 'wasmedge'; +`${h} ${w}`; +``` + +### JavaScript example: Go host application + +The [Go host app](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/main.go) uses the Reactr API to run WasmEdge's standard JavaScript interpreter [rs_embed_js.wasm](https://github.com/second-state/wasm-learning/blob/master/reactr/quickjs/rs_embed_js.wasm). You can build your own version of JavaScript interpreter by modifying [this Rust project](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/rs-embed-js). + +> Learn more about how to embed [JavaScript code in Rust](https://github.com/second-state/wasmedge-quickjs/tree/main/examples/embed_js), and how to [use Rust to implement JavaScript APIs](../../../write_wasm/js/rust.md) in WasmEdge. + +The Go host application just need to start the job for `rs_embed_js.wasm` and pass the JavaScript content to it. The Go application can then capture and print the return value from JavaScript. 
+ +```go +func main() { + r := rt.New() + doWasm := r.Register("hello-quickjs", rwasm.NewRunner("./rs_embed_js.wasm")) + + code, err := ioutil.ReadFile(os.Args[1]) + if err != nil { + fmt.Print(err) + } + res, err := doWasm(code).Then() + if err != nil { + fmt.Println(err) + return + } + + fmt.Println(string(res.([]byte))) +} +``` + +Run the Go host application as follows. + +```bash +$ cd quickjs +$ go mod tidy +$ go run -tags wasmedge main.go hello.js +String(JsString(hello wasmedge)) +``` + +The printed result shows the type information of the string in Rust and Go APIs. You can strip out this information by changing the Rust or Go applications. + +### JavaScript example: Feature examples + +WasmEdge supports many advanced JavaScript features. For the next step, you could try our [React SSR example](https://github.com/second-state/wasmedge-quickjs/tree/main/example_js/react_ssr) to generate an HTML UI from a Reactr function! +You can just build the `dist/main.js` from the React SSR example, and copy it over to this example folder to see it in action! + +```bash +$ cd quickjs +# copy over the dist/main.js file from the react ssr example +$ go mod tidy +$ go run -tags wasmedge main.go main.js +
This is home
This is page
+UnDefined +``` diff --git a/docs/embed/use-case/serverless/_category_.json b/docs/embed/use-case/serverless/_category_.json new file mode 100644 index 000000000..53e7dfdd2 --- /dev/null +++ b/docs/embed/use-case/serverless/_category_.json @@ -0,0 +1,8 @@ +{ + "label": "Serverless Platforms", + "position": 9, + "link": { + "type": "generated-index", + "description": "Run WebAssembly as an alternative lightweight runtime side-by-side with Docker and microVMs in cloud native infrastructure" + } +} diff --git a/docs/embed/use-case/serverless/aws.md b/docs/embed/use-case/serverless/aws.md new file mode 100644 index 000000000..aaa628461 --- /dev/null +++ b/docs/embed/use-case/serverless/aws.md @@ -0,0 +1,266 @@ +--- +sidebar_position: 1 +--- + +# WebAssembly Serverless Functions in AWS Lambda + +In this article, we will show you two serverless functions in Rust and WasmEdge deployed on AWS Lambda. One is the image processing function, the other one is the TensorFlow inference function. + +> For the insight on why WasmEdge on AWS Lambda, please refer to the article [WebAssembly Serverless Functions in AWS Lambda](https://www.secondstate.io/articles/webassembly-serverless-functions-in-aws-lambda/) + +## Prerequisites + +Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode. + +```bash +rustup target add wasm32-wasi +``` + +The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on AWS Lambda. We will assume that you already have the basic knowledge of how to work with Next.js and Lambda. + +## Example 1: Image processing + +Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white.
A [live demo](https://second-state.github.io/aws-lambda-wasm-runtime/) deployed through GitHub Pages is available. + +Fork the [demo application’s GitHub repo](https://github.com/second-state/aws-lambda-wasm-runtime) to get started. To deploy the application on AWS Lambda, follow the guide in the repository [README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md). + +### Create the function + +This repo is a standard Next.js application. The backend serverless function is in the `api/functions/image_grayscale` folder. The `src/main.rs` file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. + +```rust +use hex; +use std::io::{self, Read}; +use image::{ImageOutputFormat, ImageFormat}; + +fn main() { + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); + let img = image::load_from_memory(&buf).unwrap(); + let filtered = img.grayscale(); + let mut buf = vec![]; + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + }, + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + }, + }; + io::stdout().write_all(&buf).unwrap(); + io::stdout().flush().unwrap(); +} +``` + +You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-grayscale/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/grayscale.wasm ../../ +``` + +> When we build the docker image, `api/pre.sh` is executed. `pre.sh` installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. 
+ +### Create the service script to load the function + +The [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice that [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/pre.sh) for better performance. + +```javascript +const { spawn } = require('child_process'); +const path = require('path'); + +function _runWasm(reqBody) { + return new Promise(resolve => { + const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [path.join(__dirname, 'grayscale.so')]); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + let buf = Buffer.concat(d); + resolve(buf); + }); + + wasmedge.stdin.write(reqBody); + wasmedge.stdin.end(''); + }); +} +``` + +The `exports.handler` part of `hello.js` exports an async function handler, used to handle different events every time the serverless function is called. In this example, we simply process the image by calling the function above and return the result, but more complicated event-handling behavior may be defined based on your need. We also need to return some `Access-Control-Allow` headers to avoid [Cross-Origin Resource Sharing (CORS)](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) errors when calling the serverless function from a browser. You can read more about CORS errors [here](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS/Errors) if you encounter them when replicating our example. 
+ +```javascript +exports.handler = async function(event, context) { + var typedArray = new Uint8Array(event.body.match(/[\da-f]{2}/gi).map(function (h) { + return parseInt(h, 16); + })); + let buf = await _runWasm(typedArray); + return { + statusCode: 200, + headers: { + "Access-Control-Allow-Headers" : "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Methods": "DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT" + }, + body: buf.toString('hex') + }; +} +``` + +### Build the Docker image for Lambda deployment + +Now we have the WebAssembly bytecode function and the script to load and connect to the web request. In order to deploy them as a function service on AWS Lambda, you still need to package the whole thing into a Docker image. + +We are not going to cover in detail about how to build the Docker image and deploy on AWS Lambda, as there are detailed steps in the [Deploy section of the repository README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md#deploy). However, we will highlight some lines in the [`Dockerfile`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/Dockerfile) for you to avoid some pitfalls. + +```dockerfile +FROM public.ecr.aws/lambda/nodejs:14 + +# Change directory to /var/task +WORKDIR /var/task + +RUN yum update -y && yum install -y curl tar gzip + +# Bundle and pre-compile the wasm files +COPY *.wasm ./ +COPY pre.sh ./ +RUN chmod +x pre.sh +RUN ./pre.sh + +# Bundle the JS files +COPY *.js ./ + +CMD [ "hello.handler" ] +``` + +First, we are building the image from [AWS Lambda's Node.js base image](https://hub.docker.com/r/amazon/aws-lambda-nodejs). 
The advantage of using AWS Lambda's base image is that it includes the [Lambda Runtime Interface Client (RIC)](https://github.com/aws/aws-lambda-nodejs-runtime-interface-client), which we need to implement in our Docker image as it is required by AWS Lambda. The Amazon Linux uses `yum` as the package manager. + +> These base images contain the Amazon Linux Base operating system, the runtime for a given language, dependencies and the Lambda Runtime Interface Client (RIC), which implements the Lambda [Runtime API](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-api.html). The Lambda Runtime Interface Client allows your runtime to receive requests from and send requests to the Lambda service. + +Second, we need to put our function and all its dependencies in the `/var/task` directory. Files in other folders will not be executed by AWS Lambda. + +Third, we need to define the default command when we start our container. `CMD [ "hello.handler" ]` means that we will call the `handler` function in `hello.js` whenever our serverless function is called. Recall that we have defined and exported the handler function in the previous steps through `exports.handler = ...` in `hello.js`. + +### Optional: test the Docker image locally + +Docker images built from AWS Lambda's base images can be tested locally following [this guide](https://docs.aws.amazon.com/lambda/latest/dg/images-test.html). Local testing requires [AWS Lambda Runtime Interface Emulator (RIE)](https://github.com/aws/aws-lambda-runtime-interface-emulator), which is already installed in all of AWS Lambda's base images. To test your image, first, start the Docker container by running: + +```bash +docker run -p 9000:8080 myfunction:latest +``` + +This command sets a function endpoint on your local machine at `http://localhost:9000/2015-03-31/functions/function/invocations`. 
+ +Then, from a separate terminal window, run: + +```bash +curl -XPOST "http://localhost:9000/2015-03-31/functions/function/invocations" -d '{}' +``` + +And you should get your expected output in the terminal. + +If you don't want to use a base image from AWS Lambda, you can also use your own base image and install RIC and/or RIE while building your Docker image. Just follow **Create an image from an alternative base image** section from [this guide](https://docs.aws.amazon.com/lambda/latest/dg/images-create.html). + +That's it! After building your Docker image, you can deploy it to AWS Lambda following steps outlined in the repository [README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md#deploy). Now your serverless function is ready to rock! + +## Example 2: AI inference + +The [second demo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/tensorflow) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. + +It is in [the same GitHub repo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. The backend serverless function for image classification is in the `api/functions/image-classification` folder in the `tensorflow` branch. The `src/main.rs` file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. 
+ +```rust +pub fn main() { + // Step 1: Load the TFLite model + let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); + let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); + + // Step 2: Read image from STDIN + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + // Step 3: Resize the input image for the tensorflow model + let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); + + // Step 4: AI inference + let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); + session.add_input("input", &flat_img, &[1, 224, 224, 3]) + .run(); + let res_vec: Vec = session.get_output("MobilenetV1/Predictions/Reshape_1"); + + // Step 5: Find the food label that responds to the highest probability in res_vec + // ... ... + let mut label_lines = labels.lines(); + for _i in 0..max_index { + label_lines.next(); + } + + // Step 6: Generate the output text + let class_name = label_lines.next().unwrap().to_string(); + if max_value > 50 { + println!("It {} a {} in the picture", confidence.to_string(), class_name); + } else { + println!("It does not appear to be any food item in the picture."); + } +} +``` + +You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-classification/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/classify.wasm ../../ +``` + +Again, the `api/pre.sh` script installs WasmEdge runtime and its Tensorflow dependencies in this application. It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment.
+ +The [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. The handler function is similar to our previous example, and is omitted here. + +```javascript +const { spawn } = require('child_process'); +const path = require('path'); + +function _runWasm(reqBody) { + return new Promise(resolve => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge-tensorflow-lite'), + [path.join(__dirname, 'classify.so')], + {env: {'LD_LIBRARY_PATH': __dirname}} + ); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + resolve(d.join('')); + }); + + wasmedge.stdin.write(reqBody); + wasmedge.stdin.end(''); + }); +} + +exports.handler = ... // _runWasm(reqBody) is called in the handler +``` + +You can build your Docker image and deploy the function in the same way as outlined in the previous example. Now you have created a web app for subject classification! + +Next, it's your turn to use the [aws-lambda-wasm-runtime repo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/main) as a template to develop Rust serverless function on AWS Lambda. Looking forward to your great work. 
diff --git a/docs/embed/use-case/serverless/netlify.md b/docs/embed/use-case/serverless/netlify.md new file mode 100644 index 000000000..45894c3b0 --- /dev/null +++ b/docs/embed/use-case/serverless/netlify.md @@ -0,0 +1,189 @@ +--- +sidebar_position: 2 +--- + +# WebAssembly Serverless Functions in Netlify + +In this article we will show you two serverless functions in Rust and WasmEdge deployed on Netlify. One is the image processing function, the other one is the TensorFlow inference function. + +> For more insights on why WasmEdge on Netlify, please refer to the article [WebAssembly Serverless Functions in Netlify](https://www.secondstate.io/articles/netlify-wasmedge-webassembly-rust-serverless/). + +## Prerequisite + +Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode. + +```bash +rustup target add wasm32-wasi +``` + +The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on Netlify. We will assume that you already have the basic knowledge of how to work with Next.js and Netlify. + +## Example 1: Image processing + +Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white. A [live demo](https://60fe22f9ff623f0007656040--reverent-hodgkin-dc1f51.netlify.app/) deployed on Netlify is available. + +Fork the [demo application’s GitHub repo](https://github.com/second-state/netlify-wasm-runtime) to get started. To deploy the application on Netlify, just [add your github repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/). + +This repo is a standard Next.js application for the Netlify platform. 
The backend serverless function is in the [`api/functions/image_grayscale`](https://github.com/second-state/netlify-wasm-runtime/tree/main/api/functions/image-grayscale) folder. The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/functions/image-grayscale/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. + +```rust +use hex; +use std::io::{self, Read}; +use image::{ImageOutputFormat, ImageFormat}; + +fn main() { + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); + let img = image::load_from_memory(&buf).unwrap(); + let filtered = img.grayscale(); + let mut buf = vec![]; + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + }, + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + }, + }; + io::stdout().write_all(&buf).unwrap(); + io::stdout().flush().unwrap(); +} +``` + +You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-grayscale/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/grayscale.wasm ../../ +``` + +> The Netlify function runs [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/pre.sh) upon setting up the serverless environment. It installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. + +The [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. 
Notice [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/pre.sh) for better performance. + +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge'), + [path.join(__dirname, 'grayscale.so')]); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + let buf = Buffer.concat(d); + + res.setHeader('Content-Type', req.headers['image-type']); + res.send(buf); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +} +``` + +That's it. [Deploy the repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and you now have a Netlify Jamstack app with a high-performance Rust and WebAssembly based serverless backend. + +## Example 2: AI inference + +The [second demo](https://60ff7e2d10fe590008db70a9--reverent-hodgkin-dc1f51.netlify.app/) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. + +It is in [the same GitHub repo](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. 
The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. + +```rust +pub fn main() { + // Step 1: Load the TFLite model + let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); + let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); + + // Step 2: Read image from STDIN + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + // Step 3: Resize the input image for the tensorflow model + let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); + + // Step 4: AI inference + let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); + session.add_input("input", &flat_img, &[1, 224, 224, 3]) + .run(); + let res_vec: Vec = session.get_output("MobilenetV1/Predictions/Reshape_1"); + + // Step 5: Find the food label that responds to the highest probability in res_vec + // ... ... + let mut label_lines = labels.lines(); + for _i in 0..max_index { + label_lines.next(); + } + + // Step 6: Generate the output text + let class_name = label_lines.next().unwrap().to_string(); + if max_value > 50 { + println!("It {} a {} in the picture", confidence.to_string(), class_name); + } else { + println!("It does not appear to be any food item in the picture."); + } +} +``` + +You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-classification/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder.
+ +```bash +cp target/wasm32-wasi/release/classify.wasm ../../ +``` + +Again, the [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/pre.sh) script installs WasmEdge runtime and its Tensorflow dependencies in this application. It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment. + +The [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. + +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge-tensorflow-lite'), + [path.join(__dirname, 'classify.so')], + {env: {'LD_LIBRARY_PATH': __dirname}} + ); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + res.setHeader('Content-Type', `text/plain`); + res.send(d.join('')); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +} +``` + +You can now [deploy your forked repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and have a web app for subject classification. + +Next, it's your turn to develop Rust serverless functions in Netlify using the [netlify-wasm-runtime repo](https://github.com/second-state/netlify-wasm-runtime) as a template. Looking forward to your great work. 
diff --git a/docs/embed/use-case/serverless/secondstate.md b/docs/embed/use-case/serverless/secondstate.md new file mode 100644 index 000000000..e41be4cd4 --- /dev/null +++ b/docs/embed/use-case/serverless/secondstate.md @@ -0,0 +1,18 @@ +--- +sidebar_position: 3 +--- + +# Second State Functions + +Second State Functions, powered by WasmEdge, supports the Rust language as a first-class citizen. + +It can + +* [Handle text-based input and output](https://www.secondstate.io/articles/getting-started-with-function-as-a-service-in-rust/) +* [Use Binary data as function input and output](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) +* [Mix bytes and strings in function argument and return value](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) +* [Use webhooks as function input and output](https://www.secondstate.io/articles/internet-of-functions-webhooks/) +* [Access internet resources via a `http_proxy` API](https://www.secondstate.io/articles/internet-of-functions-http-proxy/) +* [Running TensorFlow models at native speed via the WasmEdge TensorFlow API](https://www.secondstate.io/articles/wasi-tensorflow/) + +Check out the [Second State Functions](https://www.secondstate.io/faas/) website for more tutorials. diff --git a/docs/embed/use-case/serverless/tencent.md b/docs/embed/use-case/serverless/tencent.md new file mode 100644 index 000000000..9937f7149 --- /dev/null +++ b/docs/embed/use-case/serverless/tencent.md @@ -0,0 +1,11 @@ +--- +sidebar_position: 4 +--- + +# WebAssembly serverless functions on Tencent Cloud + +As the main users of Tencent Cloud are from China, the tutorial is [written in Chinese](https://my.oschina.net/u/4532842/blog/5172639). + +We also provide a code template for deploying serverless WebAssembly functions on Tencent Cloud; please check out [the tencent-scf-wasm-runtime repo](https://github.com/second-state/tencent-scf-wasm-runtime).
+ +Fork the repo and start writing your own rust functions. diff --git a/docs/embed/use-case/serverless/vercel.md b/docs/embed/use-case/serverless/vercel.md new file mode 100644 index 000000000..21858e0b2 --- /dev/null +++ b/docs/embed/use-case/serverless/vercel.md @@ -0,0 +1,191 @@ +--- +sidebar_position: 5 +--- + +# Rust and WebAssembly Serverless functions in Vercel + +In this article, we will show you two serverless functions in Rust and WasmEdge deployed on Vercel. One is the image processing function, the other one is the TensorFlow inference function. + +> For more insights on why WasmEdge on Vercel, please refer to the article [Rust and WebAssembly Serverless Functions in Vercel](https://www.secondstate.io/articles/vercel-wasmedge-webassembly-rust/). + +## Prerequisite + +Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode. + +```bash +rustup target add wasm32-wasi +``` + +The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on Vercel. We will assume that you already have the basic knowledge of how to work with Vercel. + +## Example 1: Image processing + +Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white. A [live demo](https://vercel-wasm-runtime.vercel.app/) deployed on Vercel is available. + +Fork the [demo application’s GitHub repo](https://github.com/second-state/vercel-wasm-runtime) to get started. To deploy the application on Vercel, just [import the Github repo](https://vercel.com/docs/git#deploying-a-git-repository) from [Vercel for Github](https://vercel.com/docs/git/vercel-for-github) web page. + +This repo is a standard Next.js application for the Vercel platform. 
The backend serverless function is in the [`api/functions/image_grayscale`](https://github.com/second-state/vercel-wasm-runtime/tree/main/api/functions/image-grayscale) folder. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/functions/image-grayscale/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. + +```rust +use hex; +use std::io::{self, Read}; +use image::{ImageOutputFormat, ImageFormat}; + +fn main() { + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); + let img = image::load_from_memory(&buf).unwrap(); + let filtered = img.grayscale(); + let mut buf = vec![]; + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + }, + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + }, + }; + io::stdout().write_all(&buf).unwrap(); + io::stdout().flush().unwrap(); +} +``` + +You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-grayscale/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/grayscale.wasm ../../ +``` + +> Vercel runs [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/pre.sh) upon setting up the serverless environment. It installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. + +The [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/hello.js) file conforms to the Vercel serverless specification.
It loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/pre.sh) for better performance. + +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge'), + [path.join(__dirname, 'grayscale.so')]); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + let buf = Buffer.concat(d); + + res.setHeader('Content-Type', req.headers['image-type']); + res.send(buf); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +} +``` + +That's it. [Deploy the repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and you now have a Vercel Jamstack app with a high-performance Rust and WebAssembly based serverless backend. + +## Example 2: AI inference + +The [second demo](https://vercel-wasm-runtime.vercel.app/) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. + +It is in [the same GitHub repo](https://github.com/second-state/vercel-wasm-runtime) as the previous example but in the `tensorflow` branch. Note: when you [import this GitHub repo](https://vercel.com/docs/git#deploying-a-git-repository) on the Vercel website, it will create a [preview URL](https://vercel.com/docs/platform/deployments#preview) for each branch. The `tensorflow` branch would have its own deployment URL. 
+ +The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/vercel-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. + +```rust +pub fn main() { + // Step 1: Load the TFLite model + let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); + let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); + + // Step 2: Read image from STDIN + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + // Step 3: Resize the input image for the tensorflow model + let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); + + // Step 4: AI inference + let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); + session.add_input("input", &flat_img, &[1, 224, 224, 3]) + .run(); + let res_vec: Vec = session.get_output("MobilenetV1/Predictions/Reshape_1"); + + // Step 5: Find the food label that responds to the highest probability in res_vec + // ... ... 
+ let mut label_lines = labels.lines(); + for _i in 0..max_index { + label_lines.next(); + } + + // Step 6: Generate the output text + let class_name = label_lines.next().unwrap().to_string(); + if max_value > 50 { + println!("It {} a {} in the picture", confidence.to_string(), class_name); + } else { + println!("It does not appear to be any food item in the picture."); + } +} +``` + +You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-classification/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/classify.wasm ../../ +``` + +Again, the [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/pre.sh) script installs WasmEdge runtime and its Tensorflow dependencies in this application. It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment. + +The [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/hello.js) file conforms to the Vercel serverless specification. It loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. 
+ +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge-tensorflow-lite'), + [path.join(__dirname, 'classify.so')], + {env: {'LD_LIBRARY_PATH': __dirname}} + ); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + res.setHeader('Content-Type', `text/plain`); + res.send(d.join('')); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +} +``` + +You can now [deploy your forked repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and have a web app for subject classification. + +Next, it's your turn to use [the vercel-wasm-runtime repo](https://github.com/second-state/vercel-wasm-runtime) as a template to develop your own Rust serverless functions in Vercel. Looking forward to your great work. diff --git a/docs/embed/use-case/wasm-smart-devices.md b/docs/embed/use-case/wasm-smart-devices.md index a69dbfeb1..17cd9ad77 100644 --- a/docs/embed/use-case/wasm-smart-devices.md +++ b/docs/embed/use-case/wasm-smart-devices.md @@ -1,5 +1,5 @@ --- -sidebar_position: 3 +sidebar_position: 4 --- # WasmEdge On Smart Devices diff --git a/docs/embed/use-case/web-app.md b/docs/embed/use-case/web-app.md new file mode 100644 index 000000000..05036703a --- /dev/null +++ b/docs/embed/use-case/web-app.md @@ -0,0 +1,101 @@ +--- +sidebar_position: 9 +--- + +# A simple WebAssembly example + +In this article, I will show you how to build a container image for a WebAssembly application. It can then be started and managed by Kubernetes ecosystem tools, such as CRI-O, Docker, crun, and Kubernetes. 
+ +## Prerequisites + +> If you simply want a wasm bytecode file to test as a container image, you can skip the building process and just [download the wasm file here](https://github.com/second-state/wasm-learning/blob/master/cli/wasi/wasi_example_main.wasm). + +If you have not done so already, follow these simple instructions to [install Rust](https://www.rust-lang.org/tools/install). + +## Download example code + +```bash +git clone https://github.com/second-state/wasm-learning +cd wasm-learning/cli/wasi +``` + +## Build the WASM bytecode + +```bash +rustup target add wasm32-wasi +cargo build --target wasm32-wasi --release +``` + +The wasm bytecode application is in the `target/wasm32-wasi/release/wasi_example_main.wasm` file. You can now publish and use it as a container image. + +## Apply executable permission on the Wasm bytecode + +```bash +chmod +x target/wasm32-wasi/release/wasi_example_main.wasm +``` + +## Create Dockerfile + +Create a file called `Dockerfile` in the `target/wasm32-wasi/release/` folder with the following content: + +```dockerfile +FROM scratch +ADD wasi_example_main.wasm / +CMD ["/wasi_example_main.wasm"] +``` + +## Create container image with annotations + +> Please note that adding self-defined annotation is still a new feature in buildah. + +The `crun` container runtime can start the above WebAssembly-based container image. But it requires the `module.wasm.image/variant=compat-smart` annotation on the container image to indicate that it is a WebAssembly application without a guest OS. You can find the details in [Official crun repo](https://github.com/containers/crun/blob/main/docs/wasm-wasi-example.md). + +To add `module.wasm.image/variant=compat-smart` annotation in the container image, you will need the latest [buildah](https://buildah.io/). Currently, Docker does not support this feature. 
Please follow [the install instructions of buildah](https://github.com/containers/buildah/blob/main/install.md) to build the latest buildah binary. + +### Build and install the latest buildah on Ubuntu + +On Ubuntu zesty and xenial, use these commands to prepare for buildah. + +```bash +sudo apt-get -y install software-properties-common + +export OS="xUbuntu_20.04" +sudo bash -c "echo \"deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/ /\" > /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list" +sudo bash -c "curl -L https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/Release.key | apt-key add -" + +sudo add-apt-repository -y ppa:alexlarsson/flatpak +sudo apt-get -y -qq update +sudo apt-get -y install bats git libapparmor-dev libdevmapper-dev libglib2.0-dev libgpgme-dev libseccomp-dev libselinux1-dev skopeo-containers go-md2man containers-common +sudo apt-get -y install golang-1.16 make +``` + +Then, follow these steps to build and install buildah on Ubuntu. + +```bash +mkdir -p ~/buildah +cd ~/buildah +export GOPATH=`pwd` +git clone https://github.com/containers/buildah ./src/github.com/containers/buildah +cd ./src/github.com/containers/buildah +PATH=/usr/lib/go-1.16/bin:$PATH make +cp bin/buildah /usr/bin/buildah +buildah --help +``` + +### Create and publish a container image with buildah + +In the `target/wasm32-wasi/release/` folder, do the following. + +```bash +$ sudo buildah build --annotation "module.wasm.image/variant=compat-smart" -t wasm-wasi-example . +# make sure docker is install and running +# systemctl status docker +# to make sure regular user can use docker +# sudo usermod -aG docker $USER +# newgrp docker + +# You may need to use docker login to create the `~/.docker/config.json` for auth. +$ sudo buildah push --authfile ~/.docker/config.json wasm-wasi-example docker://docker.io/wasmedge/example-wasi:latest +``` + +That's it! 
Now you can try to run it in [CRI-O](../cri/crio.md#run-a-simple-webassembly-app) or [Kubernetes](../kubernetes/kubernetes-crio.md#a-simple-webassembly-app)! diff --git a/docs/start/wasmedge/comparison.md b/docs/start/wasmedge/comparison.md new file mode 100644 index 000000000..23c93a001 --- /dev/null +++ b/docs/start/wasmedge/comparison.md @@ -0,0 +1,29 @@ +--- +sidebar_position: 5 +--- + +# Comparison + +## What's the relationship between WebAssembly and Docker? + +Check out our infographic [WebAssembly vs. Docker](https://wasmedge.org/wasm_docker/). WebAssembly runs side by side with Docker in cloud native and edge native applications. + +## What's the difference for Native clients (NaCl), Application runtimes, and WebAssembly? + +We created a handy table for the comparison. + +| | NaCl | Application runtimes (eg Node & Python) | Docker-like container | WebAssembly | +| --- | --- | --- | --- | --- | +| Performance | Great | Poor | OK | Great | +| Resource footprint | Great | Poor | Poor | Great | +| Isolation | Poor | OK | OK | Great | +| Safety | Poor | OK | OK | Great | +| Portability | Poor | Great | OK | Great | +| Security | Poor | OK | OK | Great | +| Language and framework choice | N/A | N/A | Great | OK | +| Ease of use | OK | Great | Great | OK | +| Manageability | Poor | Poor | Great | Great | + +## What's the difference between WebAssembly and eBPF? + +`eBPF` is the bytecode format for a Linux kernel space VM that is suitable for network or security related tasks. WebAssembly is the bytecode format for a user space VM that is suited for business applications. [See details here](https://medium.com/codex/ebpf-and-webassembly-whose-vm-reigns-supreme-c2861ce08f89). 
\ No newline at end of file diff --git a/docu.js b/docu.js new file mode 100644 index 000000000..0e17036ed --- /dev/null +++ b/docu.js @@ -0,0 +1,453 @@ +// @ts-check + +const translations = require('@verdaccio/crowdin-translations/build/progress_lang.json'); + +const lgnMapping = { + 'de-DE': 'de', + 'pl-PL': 'pl', + 'cs-CZ': 'cs', + 'fr-FR': 'fr', + 'it-IT': 'it', + 'ru-RU': 'ru', + 'vi-VN': 'vi', + 'yo-NG': 'yo', +}; + +// @ts-ignore +const progress = translations; +const limitLngIncluded = 19; +console.log('limit translation is on %s%', limitLngIncluded); +const isDeployPreview = process.env.CONTEXT === 'deploy-preview'; +const isProductionDeployment = process.env.CONTEXT === 'production'; +const filterByProgress = (items) => { + const originLng = Object.keys(translations); + return items.filter((lgn) => { + if (lgn === 'en') { + return true; + } + const _lgn = lgnMapping[lgn] ? lgnMapping[lgn] : lgn; + if (!originLng.includes(_lgn)) { + console.log(`language ${_lgn} excluded, does not exist in origin`); + return false; + } + + if (translations[_lgn].approvalProgress <= limitLngIncluded) { + console.log( + 'language %s is being excluded due does not met limit of translation, current: %s%', + _lgn, + translations[_lgn].approvalProgress + ); + return false; + } + + return true; + }); +}; + +const i18nConfig = { + defaultLocale: 'en', + locales: isDeployPreview + ? 
['en'] + : filterByProgress([ + 'en', + 'cs-CZ', + 'de-DE', + 'es-ES', + 'fr-FR', + 'it-IT', + 'pl-PL', + 'pt-BR', + 'ru-RU', + 'sr-CS', + 'vi-VN', + 'yo-NG', + 'zh-TW', + 'zh-CN', + ]), + localeConfigs: { + en: { label: 'English' }, + 'it-IT': { label: `Italiano (${progress['it'].translationProgress}%)` }, + 'es-ES': { label: `Español (${progress['es-ES'].translationProgress}%)` }, + 'de-DE': { label: `Deutsch (${progress['de'].translationProgress}%)` }, + 'cs-CZ': { label: `Čeština (Česko) (${progress['cs'].translationProgress}%)` }, + 'fr-FR': { label: `Français (${progress['fr'].translationProgress}%)` }, + 'pl-PL': { label: `Polski (Polska) (${progress['pl'].translationProgress}%)` }, + 'pt-BR': { label: `Português (Brasil) (${progress['pt-BR'].translationProgress}%)` }, + 'ru-RU': { label: `Русский (Россия) (${progress['ru'].translationProgress}%)` }, + 'zh-CN': { label: `中文(中国)(${progress['zh-CN'].translationProgress}%)` }, + 'zh-TW': { label: `中文(台灣)(${progress['zh-TW'].translationProgress}%)` }, + 'yo-NG': { label: `Èdè Yorùbá (Nàìjíríà) (${progress['yo'].translationProgress}%)` }, + 'sr-CS': { label: `Српски (Србија) (${progress['sr-CS'].translationProgress}%)` }, + 'vi-VN': { label: `Tiếng Việt (Việt Nam) (${progress['vi'].translationProgress}%)` }, + }, +}; + +const pkgJson = require('./package.json'); + +module.exports = { + title: 'Verdaccio', + tagline: 'A lightweight Node.js private proxy registry', + organizationName: 'verdaccio', + projectName: 'verdaccio', + url: 'https://verdaccio.org', + baseUrl: '/', + onBrokenLinks: 'throw', + onBrokenMarkdownLinks: 'warn', + favicon: 'img/logo/uk/verdaccio-tiny-uk-no-bg.svg', + i18n: i18nConfig, + scripts: ['https://buttons.github.io/buttons.js'], + plugins: [ + 'docusaurus-plugin-sass', + 'docusaurus-plugin-contributors', + isProductionDeployment && + typeof process.env.SENTRY_KEY === 'string' && [ + 'docusaurus-plugin-sentry', + { DSN: process.env.SENTRY_KEY }, + ], + [ + 'docusaurus-plugin-typedoc', + { + 
entryPoints: ['../packages/node-api/src/index.ts'], + tsconfig: '../packages/node-api/tsconfig.build.json', + id: 'api/node-api', + out: 'api/node-api', + // theme: 'default', + excludePrivate: false, + excludeProtected: true, + categorizeByGroup: false, + excludeInternal: true, + sidebar: { + categoryLabel: '@verdaccio/node-api', + // position: 1, + fullNames: true, + }, + }, + ], + [ + 'content-docs', + { + id: 'community', + path: 'community', + routeBasePath: 'community', + sidebarPath: require.resolve('./sidebarsCommunity.js'), + showLastUpdateTime: true, + }, + ], + [ + 'content-docs', + { + id: 'dev', + path: 'dev', + routeBasePath: 'dev', + sidebarPath: require.resolve('./sidebarsDev.js'), + showLastUpdateTime: true, + }, + ], + [ + 'content-docs', + { + id: 'talks', + path: 'talks', + routeBasePath: 'talks', + sidebarPath: require.resolve('./sidebarsTalk.js'), + showLastUpdateTime: true, + }, + ], + [ + 'docusaurus-plugin-typedoc', + { + entryPoints: ['../packages/config/src/index.ts'], + tsconfig: '../packages/config/tsconfig.build.json', + id: 'api/config', + out: 'api/config', + sidebar: { + categoryLabel: '@verdaccio/config', + fullNames: true, + }, + }, + ], + [ + 'docusaurus-plugin-typedoc', + { + entryPoints: ['../packages/ui-components/src/index.ts'], + tsconfig: '../packages/ui-components/tsconfig.build.json', + id: 'api/ui-components', + out: 'api/ui-components', + sidebar: { + categoryLabel: '@verdaccio/ui-components', + fullNames: true, + watch: process.env.TYPEDOC_WATCH, + }, + }, + ], + [ + 'docusaurus-plugin-typedoc', + { + entryPoints: ['../packages/core/core/src/index.ts'], + tsconfig: '../packages/core/core/tsconfig.build.json', + id: 'api/core', + out: 'api/core', + sidebar: { + categoryLabel: '@verdaccio/core', + fullNames: true, + }, + }, + ], + [ + 'docusaurus-plugin-typedoc', + { + entryPoints: ['../packages/core/types/src/types.ts'], + tsconfig: '../packages/core/types/tsconfig.build.json', + id: 'api/types', + out: 'api/types', + 
categorizeByGroup: false, + includeVersion: true, + sidebar: { + categoryLabel: '@verdaccio/types', + fullNames: true, + }, + }, + ], + ], + markdown: { + mermaid: true, + }, + themes: ['@docusaurus/theme-mermaid'], + webpack: { + jsLoader: (isServer) => ({ + loader: require.resolve('esbuild-loader'), + options: { + loader: 'tsx', + format: isServer ? 'cjs' : undefined, + target: isServer ? 'node12' : 'es2017', + }, + }), + }, + customFields: { + description: 'A lightweight Node.js private proxy registry', + }, + themeConfig: { + mermaid: { + theme: { light: 'neutral', dark: 'forest' }, + }, + announcementBar: { + id: 'announcementBar', + content: + 'Help provide humanitarian support to Ukraine refugees!', + isCloseable: false, + backgroundColor: '#1595de', + textColor: '#ffffff', + }, + algolia: { + appId: 'B3TG5CBF5H', + apiKey: 'ed054733cb03418e9af25b7beb82c924', + indexName: 'verdaccio', + contextualSearch: true, + }, + docs: { + sidebar: { + hideable: true, + autoCollapseCategories: true, + }, + }, + navbar: { + title: `Verdaccio - v5.x`, + logo: { + alt: 'Verdaccio Logo', + src: 'img/logo/uk/verdaccio-tiny-uk-no-bg.svg', + }, + items: [ + { + type: 'doc', + docId: 'what-is-verdaccio', + position: 'left', + label: 'Docs', + }, + { + type: 'doc', + docId: 'api/node-api/index', + position: 'left', + label: 'API', + }, + { to: '/blog', label: 'Blog', position: 'left' }, + { + type: 'docsVersionDropdown', + position: 'right', + }, + { + href: 'https://opencollective.com/verdaccio', + label: 'Sponsor us', + position: 'right', + }, + { + href: '/community', + label: 'Community', + position: 'left', + }, + { + href: '/talks', + label: 'Video Talks', + position: 'left', + }, + { + type: 'localeDropdown', + position: 'right', + dropdownItemsAfter: [ + { + href: 'https://crowdin.com/project/verdaccio', + label: 'Help Us Translate', + }, + ], + }, + { + href: 'https://github.com/verdaccio/verdaccio', + position: 'right', + className: 'header-github-link', + 'aria-label': 
'GitHub repository', + }, + { + href: 'https://fosstodon.org/@verdaccio', + position: 'right', + className: 'header-mastodon-link', + 'aria-label': 'Follow us at Fosstodon', + }, + ], + }, + footer: { + style: 'dark', + links: [ + { + title: 'Docs', + items: [ + { + label: 'Getting Started', + to: '/docs/what-is-verdaccio', + }, + { + label: 'Docker', + to: '/docs/docker', + }, + { + label: 'Configuration', + to: '/docs/configuration', + }, + { + label: 'Logos', + to: '/docs/logo', + }, + ], + }, + { + title: 'Community', + items: [ + { + label: 'Stack Overflow', + href: 'https://stackoverflow.com/questions/tagged/verdaccio', + }, + { + label: 'Discord', + href: 'https://discord.gg/7qWJxBf', + }, + { + html: ` + + Mastodon + + `, + }, + ], + }, + { + title: 'More', + items: [ + { + label: 'Blog', + to: '/blog', + }, + { + label: 'GitHub', + href: 'https://github.com/verdaccio/verdaccio', + }, + { + label: 'Mastodon', + href: 'https://fosstodon.org/@verdaccio', + }, + { + html: ` + + Deploys by Netlify + + `, + }, + ], + }, + ], + copyright: `Copyright © ${new Date().getFullYear()} Verdaccio community. 
Built with Docusaurus.`, + }, + colorMode: { + defaultMode: 'light', + disableSwitch: false, + respectPrefersColorScheme: true, + }, + prism: { + theme: require('prism-react-renderer/themes/github'), + darkTheme: require('prism-react-renderer/themes/nightOwl'), + }, + }, + presets: [ + [ + '@docusaurus/preset-classic', + { + docs: { + sidebarPath: require.resolve('./sidebars.js'), + showLastUpdateAuthor: true, + showLastUpdateTime: true, + sidebarCollapsible: true, + remarkPlugins: [[require('@docusaurus/remark-plugin-npm2yarn'), { sync: true }]], + editUrl: ({ locale, docPath }) => { + if (locale !== 'en') { + return `https://crowdin.com/project/verdaccio/${locale}`; + } + return `https://github.com/verdaccio/verdaccio/edit/master/website/docs/${docPath}`; + }, + lastVersion: '5.x', + versions: { + current: { + label: `6.x`, + }, + '5.x': { + label: `5.x (Latest)`, + }, + }, + }, + googleAnalytics: { + // trackingID: 'UA-2527438-21' + trackingID: 'G-PCYM9FYJZT', + }, + gtag: { + trackingID: 'G-PCYM9FYJZT', + }, + blog: { + blogTitle: 'Verdaccio Official Blog', + blogDescription: 'The official Verdaccio Node.js proxy registry blog', + showReadingTime: true, + postsPerPage: 3, + feedOptions: { + type: 'all', + }, + blogSidebarCount: 'ALL', + blogSidebarTitle: 'All our posts', + editUrl: ({ locale, blogDirPath, blogPath }) => { + if (locale !== 'en') { + return `https://crowdin.com/project/verdaccio/${locale}`; + } + return `https://github.com/verdaccio/verdaccio/edit/master/website/${blogDirPath}/${blogPath}`; + }, + }, + theme: { + customCss: require.resolve('./src/css/custom.scss'), + }, + }, + ], + ], +}; \ No newline at end of file diff --git a/docusaurus.config.js b/docusaurus.config.js index dfd57902c..8bf570c1d 100644 --- a/docusaurus.config.js +++ b/docusaurus.config.js @@ -79,12 +79,24 @@ const config = { }, ], ], - + themeConfig: /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ ({ - metadata: [{ name: 'keywords', content: 'wasmedge, wasm, 
web assembly, rust, cncf, edge devices, cloud, serverless' }, { name: 'twitter:card', content: 'summary' }], + metadata: [ + { name: 'keywords', content: 'wasmedge, wasm, web assembly, rust, cncf, edge devices, cloud, serverless' }, + { name: 'description', content: 'WasmEdge is a lightweight, high-performance, and extensible WebAssembly runtime for cloud native, edge, and decentralized applications. It powers serverless apps, embedded functions, microservices, smart contracts, and IoT devices.' }, + { name: 'og:title', content: 'WasmEdge' }, + { name: 'og:description', content: 'WasmEdge is a lightweight, high-performance, and extensible WebAssembly runtime for cloud native, edge, and decentralized applications. It powers serverless apps, embedded functions, microservices, smart contracts, and IoT devices.' }, + { name: 'og:url', content: 'https://wasmedge.org/' }, + { name: 'og:type', content: 'Documentation' }, + { name: 'twitter:card', content: 'summary' }, + { name: 'twitter:image', content: 'summary_large_image' }, + { name: 'twitter:url', content: 'https://wasmedge.org/' }, + { name: 'twitter:site', content: '@realwasmedge' }, + { name: 'twitter:title', content: 'WasmEdge' } + ], image: "./static/img/wasm_logo.png", announcementBar: { id: "start", @@ -128,6 +140,7 @@ const config = { href: 'https://github.com/WasmEdge/WasmEdge', className: "header-github-link", position: 'right', + alt: 'https://github.com/WasmEdge/WasmEdge' }, ], }, @@ -222,4 +235,4 @@ const extendedConfig = { } }; -module.exports = extendedConfig; +module.exports = extendedConfig; \ No newline at end of file diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/develop/deploy/cri-runtime/containerd.md b/i18n/zh/docusaurus-plugin-content-docs/current/develop/deploy/cri-runtime/containerd.md index 002cb7985..5019e2d47 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/develop/deploy/cri-runtime/containerd.md +++ 
b/i18n/zh/docusaurus-plugin-content-docs/current/develop/deploy/cri-runtime/containerd.md @@ -2,7 +2,7 @@ sidebar_position: 1 --- -# 8.6.1 Deploy with containerd's runwasi +# Deploy with containerd's runwasi :::info diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/c++/intro.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/c++/intro.md index 8c78bf19e..e6b7af9f5 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/c++/intro.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/c++/intro.md @@ -4,7 +4,96 @@ sidebar_position: 1 # WasmEdge C++ SDK Introduction - -:::info -Work in Progress -::: +The WasmEdge C++ SDK is a collection of headers and libraries that allow you to build and deploy WebAssembly (Wasm) modules for execution on WasmEdge devices. It includes a CMake project and a set of command-line tools that you can use to build and deploy your Wasm modules. + +## Quick Start Guide + +To get started with WasmEdge, follow these steps: + +Install the WasmEdge C/C++ SDK: Download C++ SDK from the WasmEdge [website](https://wasmedge.org/docs/embed/quick-start/install) and follow the instructions to install it on your development machine + +```cpp +#include +#include + +int main(int argc, char** argv) { + /* Create the configure context and add the WASI support. */ + /* This step is not necessary unless you need WASI support. */ + WasmEdge_ConfigureContext* conf_cxt = WasmEdge_ConfigureCreate(); + WasmEdge_ConfigureAddHostRegistration(conf_cxt, WasmEdge_HostRegistration_Wasi); + /* The configure and store context to the VM creation can be NULL. */ + WasmEdge_VMContext* vm_cxt = WasmEdge_VMCreate(conf_cxt, nullptr); + + /* The parameters and returns arrays. */ + WasmEdge_Value params[1] = { WasmEdge_ValueGenI32(40) }; + WasmEdge_Value returns[1]; + /* Function name. */ + WasmEdge_String func_name = WasmEdge_StringCreateByCString("fib"); + /* Run the WASM function from file. 
*/ + WasmEdge_Result res = WasmEdge_VMRunWasmFromFile(vm_cxt, argv[1], func_name, params, 1, returns, 1); + + if (WasmEdge_ResultOK(res)) { + std::cout << "Get result: " << WasmEdge_ValueGetI32(returns[0]) << std::endl; + } else { + std::cout << "Error message: " << WasmEdge_ResultGetMessage(res) << std::endl; + } + + /* Resources deallocations. */ + WasmEdge_VMDelete(vm_cxt); + WasmEdge_ConfigureDelete(conf_cxt); + WasmEdge_StringDelete(func_name); + return 0; +} +``` + +You can use the -I flag to specify the include directories and the -L and -l flags to specify the library directories and library names, respectively. +Then you can compile the code and run: ( the 40th fibonacci number is 102334155) + +```bash +gcc example.cpp -x c++ -I/path/to/wasmedge/include -L/path/to/wasmedge/lib -lwasmedge -o example +``` + +To run the `example` executable that was created in the previous step, you can use the following command + +```bash +./example +``` + +## Quick Start Guide in AOT compiler + +```cpp +#include +#include + +int main(int argc, const char* argv[]) { + // Create the configure context and add the WASI support. + // This step is not necessary unless you need WASI support. + wasmedge_configure_context* conf_cxt = wasmedge_configure_create(); + wasmedge_configure_add_host_registration(conf_cxt, WASMEDGE_HOST_REGISTRATION_WASI); + + // Create the VM context in AOT mode. + wasmedge_vm_context* vm_cxt = wasmedge_vm_create_aot(conf_cxt, NULL); + + // The parameters and returns arrays. + wasmedge_value params[1] = { wasmedge_value_gen_i32(32) }; + wasmedge_value returns[1]; + // Function name. + wasmedge_string func_name = wasmedge_string_create_by_cstring("fib"); + // Run the WASM function from file. 
+ wasmedge_result res = wasmedge_vm_run_wasm_from_file(vm_cxt, argv[1], func_name, params, 1, returns, 1); + + if (wasmedge_result_ok(res)) { + printf("Get result: %d\n", wasmedge_value_get_i32(returns[0])); + } else { + printf("Error message: %s\n", wasmedge_result_get_message(res)); + } + + // Resources deallocations. + wasmedge_vm_delete(vm_cxt); + wasmedge_configure_delete(conf_cxt); + wasmedge_string_delete(func_name); + return 0; +} +``` + +In this example, the wasmedge_vm_create_aot function is used to create a wasmedge_vm_context object in AOT mode, which is then passed as the second argument to the wasmedge_vm_run_wasm_from_file function to execute the Wasm module in AOT mode. \ No newline at end of file diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/_category_.json b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/_category_.json new file mode 100644 index 000000000..75cd42031 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/_category_.json @@ -0,0 +1,8 @@ +{ + "label": "Service mesh and Runtimes", + "position": 8, + "link": { + "type": "generated-index", + "description": "WasmEdge could be a lightweight runtime for sidecar microservices and the API proxy as the Docker alternative." + } +} diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md new file mode 100644 index 000000000..4adf085d8 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md @@ -0,0 +1,263 @@ +--- +sidebar_position: 1 +--- + +# Dapr + +In this article, I will demonstrate how to use WasmEdge as a sidecar application runtime for Dapr. 
There are two ways to do this: + +* **Standalone WasmEdge** is the **recommended approach**: write a microservice using [Rust](../../../write_wasm/rust/networking-nonblocking.md) or [JavaScript](../../../write_wasm/js/networking.md), and run it in WasmEdge. The WasmEdge application serves web requests and communicates with the sidecar via sockets using the Dapr API. In this case, we can [run WasmEdge as a managed container in k8s](../../kubernetes/quickstart.md). +* Alternatively, Embedded WasmEdge is to create a simple microservice in Rust or Go to listen for web requests and communicate with the Dapr sidecar. It passes the request data to a WasmEdge runtime for processing. The business logic of the microservice is a WebAssembly function created and deployed by an application developer. + +> While the first approach (running the entire microservice in WasmEdge) is much preferred, we are still working on fully fledged Dapr SDKs for WasmEdge. You can track their progress in GitHub issues -- [Rust](https://github.com/WasmEdge/WasmEdge/issues/1571) and [JavaScript](https://github.com/WasmEdge/WasmEdge/issues/1572). + +## Quick start + +First you need to install [Dapr](https://docs.dapr.io/getting-started/install-dapr-cli) and [WasmEdge](../../../quick_start/install.md). [Go](https://golang.org/doc/install) and [Rust](https://www.rust-lang.org/tools/install) are optional for the standalone WasmEdge approach. However, they are required for the demo app since it showcases both standalone and embedded WasmEdge approaches. + +Fork or clone the demo application from GitHub. You can use this repo as your own application template. + +```bash +git clone https://github.com/second-state/dapr-wasm +``` + +The demo has 4 Dapr sidecar applications. The [web-port](https://github.com/second-state/dapr-wasm/tree/main/web-port) project provides a public web service for a static HTML page. This is the application’s UI. 
From the static HTML page, the user can select a microservice to turn an input image into grayscale. All 3 microservices below perform the same function. They are just implemented using different approaches. + +* **Standalone WasmEdge approach:** The [image-api-wasi-socket-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-wasi-socket-rs) project provides a standalone WasmEdge sidecar microservice that takes the input image and returns the grayscale image. The microservice is written in Rust and compiled into WebAssembly bytecode to run in WasmEdge. +* Embedded WasmEdge approach #1: The [image-api-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) project provides a simple Rust-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. +* Embedded WasmEdge approach #2: The [image-api-go](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) project provides a simple Go-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. + +You can follow the instructions in the [README](https://github.com/second-state/dapr-wasm/blob/main/README.md) to start the sidecar services. Here are commands to build the WebAssembly functions and start the sidecar services. The first set of commands deploys the static web page service and the standalone WasmEdge service written in Rust. It forms a complete application to turn an input image into grayscale. 
+ +```bash +# Build and start the static HTML web page service for the UI and router for sending the uploaded image to the grayscale microservice +cd web-port +go build +./run_web.sh +cd ../ + +# Build the standalone image grayscale web service for WasmEdge +cd image-api-wasi-socket-rs +cargo build --target wasm32-wasi +cd ../ + +# Run the microservice as a Dapr sidecar app +cd image-api-wasi-socket-rs +./run_api_wasi_socket_rs.sh +cd ../ +``` + +The second set of commands create the alternative microservices for the embedded WasmEdge function. + +```bash +# Build the grayscale WebAssembly functions, and deploy them to the sidecar projects +cd functions/grayscale +./build.sh +cd ../../ + +# Build and start the Rust-based microservice for embedding the grayscale WasmEdge function +cd image-api-rs +cargo build --release +./run_api_rs.sh +cd ../ + +# Build and start the Go-based microservice for embedding the grayscale WasmEdge function +cd image-api-go +go build +./run_api_go.sh +cd ../ +``` + +Finally, you should be able to see the web UI in your browser. + +## Recommended: The standalone WasmEdge microservice in Rust + +The [standalone WasmEdge microservice](https://github.com/second-state/dapr-wasm/blob/main/image-api-wasi-socket-rs/src/main.rs) starts a non-blocking TCP server inside WasmEdge. The TCP server passes incoming requests to `handle_client()`, which passes HTTP requests to `handle_http()`, which calls `grayscale()` to process the image data in the request. + +```rust +fn main() -> std::io::Result<()> { + let port = std::env::var("PORT").unwrap_or(9005.to_string()); + println!("new connection at {}", port); + let listener = TcpListener::bind(format!("127.0.0.1:{}", port))?; + loop { + let _ = handle_client(listener.accept()?.0); + } +} + +fn handle_client(mut stream: TcpStream) -> std::io::Result<()> { + ... ... +} + +fn handle_http(req: Request>) -> bytecodec::Result> { + ... ... 
+} + +fn grayscale(image: &[u8]) -> Vec<u8> { + let detected = image::guess_format(&image); + let mut buf = vec![]; + if detected.is_err() { + return buf; + } + + let image_format_detected = detected.unwrap(); + let img = image::load_from_memory(&image).unwrap(); + let filtered = img.grayscale(); + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + } + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + } + }; + return buf; +} +``` + +> Work in progress: It will soon interact with the Dapr sidecar through the [WasmEdge Dapr SDK in Rust](https://github.com/WasmEdge/WasmEdge/issues/1571). + +Now, you can build the microservice. It is a simple matter of compiling from Rust to WebAssembly. + +```bash +cd image-api-wasi-socket-rs +cargo build --target wasm32-wasi +``` + +Deploy the WasmEdge microservice in Dapr as follows. + +```bash +dapr run --app-id image-api-wasi-socket-rs \ + --app-protocol http \ + --app-port 9005 \ + --dapr-http-port 3503 \ + --components-path ../config \ + --log-level debug \ + wasmedge ./target/wasm32-wasi/debug/image-api-wasi-socket-rs.wasm +``` + +## Alternative: The embedded WasmEdge microservices + +The embedded WasmEdge approach requires us to create a WebAssembly function for the business logic (image processing) first, and then embed it into simple Dapr microservices. + +### Rust function for image processing + +The [Rust function](https://github.com/second-state/dapr-wasm/blob/main/functions/grayscale/src/lib.rs) is simple. It uses the [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md) macro to make it easy to call the function from a Go or Rust host embedding the WebAssembly function. It takes and returns base64 encoded image data for the web. 
+ +```rust +#[wasmedge_bindgen] +pub fn grayscale(image_data: String) -> String { + let image_bytes = image_data.split(",").map(|x| x.parse::<u8>().unwrap()).collect::<Vec<u8>>(); + return grayscale::grayscale_internal(&image_bytes); +} +``` + +The Rust function that actually performs the task is as follows. + +```rust +pub fn grayscale_internal(image_data: &[u8]) -> String { + let image_format_detected: ImageFormat = image::guess_format(&image_data).unwrap(); + let img = image::load_from_memory(&image_data).unwrap(); + let filtered = img.grayscale(); + let mut buf = vec![]; + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + } + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + } + }; + let mut base64_encoded = String::new(); + base64::encode_config_buf(&buf, base64::STANDARD, &mut base64_encoded); + return base64_encoded.to_string(); +} +``` + +### The Go host wrapper for microservice + +The [Go-based microservice](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) embeds the above image processing function in WasmEdge. The [microservice itself](https://github.com/second-state/dapr-wasm/blob/main/image-api-go/image_api.go) is a web server and utilizes the Dapr Go SDK. + +```go +func main() { + s := daprd.NewService(":9003") + + if err := s.AddServiceInvocationHandler("/api/image", imageHandlerWASI); err != nil { + log.Fatalf("error adding invocation handler: %v", err) + } + + if err := s.Start(); err != nil && err != http.ErrServerClosed { + log.Fatalf("error listening: %v", err) + } +} +``` + +The `imageHandlerWASI()` function [starts a WasmEdge instance](../../../sdk/go/function.md) and calls the image processing (grayscale) function in it via [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md). + +Build and deploy the Go microservice to Dapr as follows. 
+ +```bash +cd image-api-go +go build +dapr run --app-id image-api-go \ + --app-protocol http \ + --app-port 9003 \ + --dapr-http-port 3501 \ + --log-level debug \ + --components-path ../config \ + ./image-api-go +``` + +### The Rust host wrapper for microservice + +The [Rust-based microservice](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) embeds the above image processing function in WasmEdge. The [microservice itself](https://github.com/second-state/dapr-wasm/blob/main/image-api-rs/src/main.rs) is a Tokio and Warp based web server. + +```rust +#[tokio::main] +pub async fn run_server(port: u16) { + pretty_env_logger::init(); + let home = warp::get().map(warp::reply); + + let image = warp::post() + .and(warp::path("api")) + .and(warp::path("image")) + .and(warp::body::bytes()) + .map(|bytes: bytes::Bytes| { + let v: Vec<u8> = bytes.iter().map(|&x| x).collect(); + let res = image_process_wasmedge_sys(&v); + let _encoded = base64::encode(&res); + Response::builder() + .header("content-type", "image/png") + .body(res) + }); + + let routes = home.or(image); + let routes = routes.with(warp::cors().allow_any_origin()); + + let log = warp::log("dapr_wasm"); + let routes = routes.with(log); + warp::serve(routes).run((Ipv4Addr::UNSPECIFIED, port)).await +} +``` + +The `image_process_wasmedge_sys()` function [starts a WasmEdge instance](../../../sdk/rust/sys_run_host_func.md) and calls the image processing (grayscale) function in it via [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md). + +Build and deploy the Rust microservice to Dapr as follows. + +```bash +cd image-api-rs +cargo build --release +dapr stop image-api-rs + +# Change this to your own path for WasmEdge +export LD_LIBRARY_PATH=/home/coder/.wasmedge/lib64/ + +dapr run --app-id image-api-rs \ + --app-protocol http \ + --app-port 9004 \ + --dapr-http-port 3502 \ + --components-path ../config \ + --log-level debug \ + ./target/release/image-api-rs +``` + +That's it! 
[Let us know](https://github.com/WasmEdge/WasmEdge/discussions) your cool Dapr microservices in WebAssembly! diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md new file mode 100644 index 000000000..b39b7b31b --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md @@ -0,0 +1,10 @@ +--- +sidebar_position: 2 +--- + +# Apache Eventmesh + + +:::info +Coming Soon or you can [help out](https://github.com/WasmEdge/WasmEdge/issues/632) +::: \ No newline at end of file diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md new file mode 100644 index 000000000..2b31bf21a --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md @@ -0,0 +1,376 @@ +--- +sidebar_position: 7 +--- + +# Reactr + +[Reactr](https://github.com/suborbital/reactr) is a fast, performant function scheduling library written in Go. Reactr is designed to be flexible, with the ability to run embedded in your Go applications and first-class support for WebAssembly. +Taking advantage of Go's superior concurrency capabilities, Reactr can manage and execute hundreds of WebAssembly runtime instances all at once, making a great framework for server-side applications. + +Reactr allows you to run WebAssembly functions in Go, so does the [WasmEdge Go SDK](../../../sdk/go.md). +The unique feature of Reactr is that it provides a rich set of host functions in Go, which support access to networks and databases etc. Reactr then provides Rust (and Swift / AssemblyScript) APIs to call those host functions from within the WebAssembly function. + +In this article, we will show you how to use WasmEdge together with Reactr to take advantage of the best of both worlds. 
WasmEdge is the [fastest and most extensible WebAssembly runtime](../../../features.md). +It is also the fastest in [Reactr's official test suite](https://github.com/suborbital/reactr/runs/4476074960?check_suite_focus=true). +We will show you how to run Rust functions compiled to WebAssembly as well as JavaScript programs in WasmEdge and Reactr. + +> WasmEdge provides [advanced support for JavaScript](../../../write_wasm/js.md) including [mixing Rust with JavaScript](../../../write_wasm/js/rust.md) for improved performance. + +* [Hello world](#hello-world) +* [Database query](#database-query) +* [Embed JavaScript in Go](#embed-javascript-in-go) + +## Prerequisites + +You need have [Rust](https://www.rust-lang.org/tools/install), [Go](https://go.dev/doc/install), and [WasmEdge](../../../quick_start/install.md) installed on your system. +The GCC compiler (installed via the `build-essential` package) is also needed for WasmEdge. + +```bash +sudo apt-get update +sudo apt-get -y upgrade +sudo apt install build-essential + +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh +source $HOME/.cargo/env +rustup target add wasm32-wasi + +curl -OL https://golang.org/dl/go1.17.5.linux-amd64.tar.gz +sudo tar -C /usr/local -xvf go1.17.5.linux-amd64.tar.gz +export PATH=$PATH:/usr/local/go/bin + +wget -qO- https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash +source $HOME/.wasmedge/env +``` + +## Hello world + +A simple `hello world` example for Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/hello). + +### Hello world: Rust function compiled to WebAssembly + +Let's first create [a simple Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/hello-echo/src/lib.rs) to echo hello. +The Rust function `HelloEcho::run()` is as follows. It will be exposed to the Go host application through Reactr. 
+ +```rust +use suborbital::runnable::*; + +struct HelloEcho{} + +impl Runnable for HelloEcho { + fn run(&self, input: Vec<u8>) -> Result<Vec<u8>, RunErr> { + let in_string = String::from_utf8(input).unwrap(); + Ok(format!("hello {}", in_string).as_bytes().to_vec()) + } +} +``` + +Let's build the Rust function into a WebAssembly bytecode file. + +```bash +cd hello-echo +cargo build --target wasm32-wasi --release +cp target/wasm32-wasi/release/hello_echo.wasm .. +cd .. +``` + +### Hello world: Go host application + +Next, let's look into the [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/main.go) that executes the WebAssembly functions. +The `runBundle()` function executes the `run()` function in the `Runnable` struct once. + +```go +func runBundle() { + r := rt.New() + doWasm := r.Register("hello-echo", rwasm.NewRunner("./hello_echo.wasm")) + + res, err := doWasm([]byte("wasmWorker!")).Then() + if err != nil { + fmt.Println(err) + return + } + + fmt.Println(string(res.([]byte))) +} +``` + +The `runGroup()` function executes the Rust-compiled WebAssembly `run()` function multiple times asynchronously in a group, and receives the results as they come in. + +```go +func runGroup() { + r := rt.New() + + doWasm := r.Register("hello-echo", rwasm.NewRunner("./hello_echo.wasm")) + + grp := rt.NewGroup() + for i := 0; i < 100000; i++ { + grp.Add(doWasm([]byte(fmt.Sprintf("world %d", i)))) + } + + if err := grp.Wait(); err != nil { + fmt.Println(err) + } +} +``` + +Finally, let's run the Go host application and see the results printed to the console. + +> You must use the `-tags wasmedge` flag to take advantage of the performance and extended WebAssembly APIs provided by WasmEdge. 
+ +```bash +go mod tidy +go run -tags wasmedge main.go +``` + +## Database query + +In [this example](https://github.com/second-state/wasm-learning/tree/master/reactr/db), we will demonstrate how to use Reactr host functions and APIs to query a PostgreSQL database from your WebAssembly function. + +### Database query: Install and set up a PostgreSQL database + +We will start a PostgreSQL instance through Docker. + +```bash +docker pull postgres +docker run --name reactr-postgres -p 5432:5432 -e POSTGRES_PASSWORD=12345 -d postgres +``` + +Next, let's create a database and populate it with some sample data. + +```bash +$ docker run -it --rm --network host postgres psql -h 127.0.0.1 -U postgres +postgres=# CREATE DATABASE reactr; +postgres=# \c reactr; + +# Create a table: +postgres=# CREATE TABLE users ( + uuid varchar(100) CONSTRAINT firstkey PRIMARY KEY, + email varchar(50) NOT NULL, + created_at date, + state char(1), + identifier integer +); +``` + +Leave this running and start another terminal window to interact with this PostgreSQL server. + +### Database query: Rust function compiled to WebAssembly + +Let's create [a Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/db/rs-db/src/lib.rs) to access the PostgreSQL database. +The Rust function `RsDbtest::run()` is as follows. It will be exposed to the Go host application through Reactr. It uses named queries such as `PGInsertUser` and `PGSelectUserWithUUID` to operate the database. Those queries are defined in the Go host application, and we will see them later. 
+ +```rust +use suborbital::runnable::*; +use suborbital::db; +use suborbital::util; +use suborbital::db::query; +use suborbital::log; +use uuid::Uuid; + +struct RsDbtest{} + +impl Runnable for RsDbtest { + fn run(&self, _: Vec<u8>) -> Result<Vec<u8>, RunErr> { + let uuid = Uuid::new_v4().to_string(); + + let mut args: Vec<query::QueryArg> = Vec::new(); + args.push(query::QueryArg::new("uuid", uuid.as_str())); + args.push(query::QueryArg::new("email", "connor@suborbital.dev")); + + match db::insert("PGInsertUser", args) { + Ok(_) => log::info("insert successful"), + Err(e) => { + return Err(RunErr::new(500, e.message.as_str())) + } + }; + + let mut args2: Vec<query::QueryArg> = Vec::new(); + args2.push(query::QueryArg::new("uuid", uuid.as_str())); + + match db::update("PGUpdateUserWithUUID", args2.clone()) { + Ok(rows) => log::info(format!("update: {}", util::to_string(rows).as_str()).as_str()), + Err(e) => { + return Err(RunErr::new(500, e.message.as_str())) + } + } + + match db::select("PGSelectUserWithUUID", args2.clone()) { + Ok(result) => log::info(format!("select: {}", util::to_string(result).as_str()).as_str()), + Err(e) => { + return Err(RunErr::new(500, e.message.as_str())) + } + } + + match db::delete("PGDeleteUserWithUUID", args2.clone()) { + Ok(rows) => log::info(format!("delete: {}", util::to_string(rows).as_str()).as_str()), + Err(e) => { + return Err(RunErr::new(500, e.message.as_str())) + } + } + + ... ... + } +} +``` + +Let's build the Rust function into a WebAssembly bytecode file. + +```bash +cd rs-db +cargo build --target wasm32-wasi --release +cp target/wasm32-wasi/release/rs_db.wasm .. +cd .. +``` + +### Database query: Go host application + +The [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/db/main.go) first defines the SQL queries and gives each of them a name. +We will then pass those queries to the Reactr runtime as a configuration. 
+ +```go +func main() { + dbConnString, exists := os.LookupEnv("REACTR_DB_CONN_STRING") + if !exists { + fmt.Println("skipping as conn string env var not set") + return + } + + q1 := rcap.Query{ + Type: rcap.QueryTypeInsert, + Name: "PGInsertUser", + VarCount: 2, + Query: ` + INSERT INTO users (uuid, email, created_at, state, identifier) + VALUES ($1, $2, NOW(), 'A', 12345)`, + } + + q2 := rcap.Query{ + Type: rcap.QueryTypeSelect, + Name: "PGSelectUserWithUUID", + VarCount: 1, + Query: ` + SELECT * FROM users + WHERE uuid = $1`, + } + + q3 := rcap.Query{ + Type: rcap.QueryTypeUpdate, + Name: "PGUpdateUserWithUUID", + VarCount: 1, + Query: ` + UPDATE users SET state='B' WHERE uuid = $1`, + } + + q4 := rcap.Query{ + Type: rcap.QueryTypeDelete, + Name: "PGDeleteUserWithUUID", + VarCount: 1, + Query: ` + DELETE FROM users WHERE uuid = $1`, + } + + config := rcap.DefaultConfigWithDB(vlog.Default(), rcap.DBTypePostgres, dbConnString, []rcap.Query{q1, q2, q3, q4}) + + r, err := rt.NewWithConfig(config) + if err != nil { + fmt.Println(err) + return + } + + ... ... +} +``` + +Then, we can run the WebAssembly function from Reactr. + +```go +func main() { + ... ... + + doWasm := r.Register("rs-db", rwasm.NewRunner("./rs_db.wasm")) + + res, err := doWasm(nil).Then() + if err != nil { + fmt.Println(err) + return + } + + fmt.Println(string(res.([]byte))) +} +``` + +Finally, let's run the Go host application and see the results printed to the console. + +> You must use the `-tags wasmedge` flag to take advantage of the performance and extended WebAssembly APIs provided by WasmEdge. 
+ +```bash +export REACTR_DB_CONN_STRING='postgresql://postgres:12345@127.0.0.1:5432/reactr' +go mod tidy +go run -tags wasmedge main.go +``` + +## Embed JavaScript in Go + +As we mentioned, a key feature of the WasmEdge Runtime is its advanced [JavaScript support](../../../write_wasm/js.md), which allows JavaScript programs to run in lightweight, high-performance, safe, multi-language, and [Kubernetes-managed WasmEdge containers](../../kubernetes.md). +A simple example of embedded JavaScript function in Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs). + +### JavaScript example + +The [JavaScript example function](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/hello.js) is very simple. It just returns a string value. + +```javascript +let h = 'hello'; +let w = 'wasmedge'; +`${h} ${w}`; +``` + +### JavaScript example: Go host application + +The [Go host app](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/main.go) uses the Reactr API to run WasmEdge's standard JavaScript interpreter [rs_embed_js.wasm](https://github.com/second-state/wasm-learning/blob/master/reactr/quickjs/rs_embed_js.wasm). You can build your own version of JavaScript interpreter by modifying [this Rust project](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/rs-embed-js). + +> Learn more about how to embed [JavaScript code in Rust](https://github.com/second-state/wasmedge-quickjs/tree/main/examples/embed_js), and how to [use Rust to implement JavaScript APIs](../../../write_wasm/js/rust.md) in WasmEdge. + +The Go host application just need to start the job for `rs_embed_js.wasm` and pass the JavaScript content to it. The Go application can then capture and print the return value from JavaScript. 
+ +```go +func main() { + r := rt.New() + doWasm := r.Register("hello-quickjs", rwasm.NewRunner("./rs_embed_js.wasm")) + + code, err := ioutil.ReadFile(os.Args[1]) + if err != nil { + fmt.Print(err) + } + res, err := doWasm(code).Then() + if err != nil { + fmt.Println(err) + return + } + + fmt.Println(string(res.([]byte))) +} +``` + +Run the Go host application as follows. + +```bash +$ cd quickjs +$ go mod tidy +$ go run -tags wasmedge main.go hello.js +String(JsString(hello wasmedge)) +``` + +The printed result shows the type information of the string in Rust and Go APIs. You can strip out this information by changing the Rust or Go applications. + +### JavaScript example: Feature examples + +WasmEdge supports many advanced JavaScript features. For the next step, you could try our [React SSR example](https://github.com/second-state/wasmedge-quickjs/tree/main/example_js/react_ssr) to generate an HTML UI from a Reactr function! +You can just build the `dist/main.js` from the React SSR example, and copy it over to this example folder to see it in action! + +```bash +$ cd quickjs +# copy over the dist/main.js file from the react ssr example +$ go mod tidy +$ go run -tags wasmedge main.go main.js +
This is home
This is page
+UnDefined +``` diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/_category_.json b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/_category_.json new file mode 100644 index 000000000..53e7dfdd2 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/_category_.json @@ -0,0 +1,8 @@ +{ + "label": "Serverless Platforms", + "position": 9, + "link": { + "type": "generated-index", + "description": "Run WebAssembly as an alternative lightweight runtime side-by-side with Docker and microVMs in cloud native infrastructure" + } +} diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/aws.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/aws.md new file mode 100644 index 000000000..aaa628461 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/aws.md @@ -0,0 +1,266 @@ +--- +sidebar_position: 1 +--- + +# WebAssembly Serverless Functions in AWS Lambda + +In this article, we will show you two serverless functions in Rust and WasmEdge deployed on AWS Lambda. One is the image processing function, the other one is the TensorFlow inference function. + +> For the insight on why WasmEdge on AWS Lambda, please refer to the article [WebAssembly Serverless Functions in AWS Lambda](https://www.secondstate.io/articles/webassembly-serverless-functions-in-aws-lambda/) + +## Prerequisites + +Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode. + +```bash +rustup target add wasm32-wasi +``` + +The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on AWS Lambda. We will assume that you already have the basic knowledge of how to work with Next.js and Lambda. 
+ +## Example 1: Image processing + +Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white. A [live demo](https://second-state.github.io/aws-lambda-wasm-runtime/) deployed through GitHub Pages is available. + +Fork the [demo application’s GitHub repo](https://github.com/second-state/aws-lambda-wasm-runtime) to get started. To deploy the application on AWS Lambda, follow the guide in the repository [README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md). + +### Create the function + +This repo is a standard Next.js application. The backend serverless function is in the `api/functions/image_grayscale` folder. The `src/main.rs` file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. + +```rust +use hex; +use std::io::{self, Read}; +use image::{ImageOutputFormat, ImageFormat}; + +fn main() { + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); + let img = image::load_from_memory(&buf).unwrap(); + let filtered = img.grayscale(); + let mut buf = vec![]; + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + }, + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + }, + }; + io::stdout().write_all(&buf).unwrap(); + io::stdout().flush().unwrap(); +} +``` + +You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-grayscale/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/grayscale.wasm ../../ +``` + +> When we build the docker image, `api/pre.sh` is executed. 
`pre.sh` installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. + +### Create the service script to load the function + +The [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice that [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/pre.sh) for better performance. + +```javascript +const { spawn } = require('child_process'); +const path = require('path'); + +function _runWasm(reqBody) { + return new Promise(resolve => { + const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [path.join(__dirname, 'grayscale.so')]); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + let buf = Buffer.concat(d); + resolve(buf); + }); + + wasmedge.stdin.write(reqBody); + wasmedge.stdin.end(''); + }); +} +``` + +The `exports.handler` part of `hello.js` exports an async function handler, used to handle different events every time the serverless function is called. In this example, we simply process the image by calling the function above and return the result, but more complicated event-handling behavior may be defined based on your need. We also need to return some `Access-Control-Allow` headers to avoid [Cross-Origin Resource Sharing (CORS)](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) errors when calling the serverless function from a browser. You can read more about CORS errors [here](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS/Errors) if you encounter them when replicating our example. 
+ +```javascript +exports.handler = async function(event, context) { + var typedArray = new Uint8Array(event.body.match(/[\da-f]{2}/gi).map(function (h) { + return parseInt(h, 16); + })); + let buf = await _runWasm(typedArray); + return { + statusCode: 200, + headers: { + "Access-Control-Allow-Headers" : "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Methods": "DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT" + }, + body: buf.toString('hex') + }; +} +``` + +### Build the Docker image for Lambda deployment + +Now we have the WebAssembly bytecode function and the script to load and connect to the web request. In order to deploy them as a function service on AWS Lambda, you still need to package the whole thing into a Docker image. + +We are not going to cover in detail about how to build the Docker image and deploy on AWS Lambda, as there are detailed steps in the [Deploy section of the repository README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md#deploy). However, we will highlight some lines in the [`Dockerfile`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/Dockerfile) for you to avoid some pitfalls. + +```dockerfile +FROM public.ecr.aws/lambda/nodejs:14 + +# Change directory to /var/task +WORKDIR /var/task + +RUN yum update -y && yum install -y curl tar gzip + +# Bundle and pre-compile the wasm files +COPY *.wasm ./ +COPY pre.sh ./ +RUN chmod +x pre.sh +RUN ./pre.sh + +# Bundle the JS files +COPY *.js ./ + +CMD [ "hello.handler" ] +``` + +First, we are building the image from [AWS Lambda's Node.js base image](https://hub.docker.com/r/amazon/aws-lambda-nodejs). 
The advantage of using AWS Lambda's base image is that it includes the [Lambda Runtime Interface Client (RIC)](https://github.com/aws/aws-lambda-nodejs-runtime-interface-client), which we need to implement in our Docker image as it is required by AWS Lambda. The Amazon Linux uses `yum` as the package manager. + +> These base images contain the Amazon Linux Base operating system, the runtime for a given language, dependencies and the Lambda Runtime Interface Client (RIC), which implements the Lambda [Runtime API](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-api.html). The Lambda Runtime Interface Client allows your runtime to receive requests from and send requests to the Lambda service. + +Second, we need to put our function and all its dependencies in the `/var/task` directory. Files in other folders will not be executed by AWS Lambda. + +Third, we need to define the default command when we start our container. `CMD [ "hello.handler" ]` means that we will call the `handler` function in `hello.js` whenever our serverless function is called. Recall that we have defined and exported the handler function in the previous steps through `exports.handler = ...` in `hello.js`. + +### Optional: test the Docker image locally + +Docker images built from AWS Lambda's base images can be tested locally following [this guide](https://docs.aws.amazon.com/lambda/latest/dg/images-test.html). Local testing requires [AWS Lambda Runtime Interface Emulator (RIE)](https://github.com/aws/aws-lambda-runtime-interface-emulator), which is already installed in all of AWS Lambda's base images. To test your image, first, start the Docker container by running: + +```bash +docker run -p 9000:8080 myfunction:latest +``` + +This command sets a function endpoint on your local machine at `http://localhost:9000/2015-03-31/functions/function/invocations`. 
+ +Then, from a separate terminal window, run: + +```bash +curl -XPOST "http://localhost:9000/2015-03-31/functions/function/invocations" -d '{}' +``` + +And you should get your expected output in the terminal. + +If you don't want to use a base image from AWS Lambda, you can also use your own base image and install RIC and/or RIE while building your Docker image. Just follow **Create an image from an alternative base image** section from [this guide](https://docs.aws.amazon.com/lambda/latest/dg/images-create.html). + +That's it! After building your Docker image, you can deploy it to AWS Lambda following steps outlined in the repository [README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md#deploy). Now your serverless function is ready to rock! + +## Example 2: AI inference + +The [second demo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/tensorflow) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. + +It is in [the same GitHub repo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. The backend serverless function for image classification is in the `api/functions/image-classification` folder in the `tensorflow` branch. The `src/main.rs` file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. 
+ +```rust +pub fn main() { + // Step 1: Load the TFLite model + let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); + let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); + + // Step 2: Read image from STDIN + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + // Step 3: Resize the input image for the tensorflow model + let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); + + // Step 4: AI inference + let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); + session.add_input("input", &flat_img, &[1, 224, 224, 3]) + .run(); + let res_vec: Vec<u8> = session.get_output("MobilenetV1/Predictions/Reshape_1"); + + // Step 5: Find the food label that responds to the highest probability in res_vec + // ... ... + let mut label_lines = labels.lines(); + for _i in 0..max_index { + label_lines.next(); + } + + // Step 6: Generate the output text + let class_name = label_lines.next().unwrap().to_string(); + if max_value > 50 { + println!("It {} a {} in the picture", confidence.to_string(), class_name); + } else { + println!("It does not appear to be any food item in the picture."); + } +} +``` + +You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-classification/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/classify.wasm ../../ +``` + +Again, the `api/pre.sh` script installs WasmEdge runtime and its Tensorflow dependencies in this application. It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment. 
+ +The [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. The handler function is similar to our previous example, and is omitted here. + +```javascript +const { spawn } = require('child_process'); +const path = require('path'); + +function _runWasm(reqBody) { + return new Promise(resolve => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge-tensorflow-lite'), + [path.join(__dirname, 'classify.so')], + {env: {'LD_LIBRARY_PATH': __dirname}} + ); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + resolve(d.join('')); + }); + + wasmedge.stdin.write(reqBody); + wasmedge.stdin.end(''); + }); +} + +exports.handler = ... // _runWasm(reqBody) is called in the handler +``` + +You can build your Docker image and deploy the function in the same way as outlined in the previous example. Now you have created a web app for subject classification! + +Next, it's your turn to use the [aws-lambda-wasm-runtime repo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/main) as a template to develop Rust serverless function on AWS Lambda. Looking forward to your great work. 
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/netlify.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/netlify.md new file mode 100644 index 000000000..45894c3b0 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/netlify.md @@ -0,0 +1,189 @@ +--- +sidebar_position: 2 +--- + +# WebAssembly Serverless Functions in Netlify + +In this article we will show you two serverless functions in Rust and WasmEdge deployed on Netlify. One is the image processing function, the other one is the TensorFlow inference function. + +> For more insights on why WasmEdge on Netlify, please refer to the article [WebAssembly Serverless Functions in Netlify](https://www.secondstate.io/articles/netlify-wasmedge-webassembly-rust-serverless/). + +## Prerequisite + +Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode. + +```bash +rustup target add wasm32-wasi +``` + +The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on Netlify. We will assume that you already have the basic knowledge of how to work with Next.js and Netlify. + +## Example 1: Image processing + +Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white. A [live demo](https://60fe22f9ff623f0007656040--reverent-hodgkin-dc1f51.netlify.app/) deployed on Netlify is available. + +Fork the [demo application’s GitHub repo](https://github.com/second-state/netlify-wasm-runtime) to get started. To deploy the application on Netlify, just [add your github repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/). + +This repo is a standard Next.js application for the Netlify platform. 
The backend serverless function is in the [`api/functions/image_grayscale`](https://github.com/second-state/netlify-wasm-runtime/tree/main/api/functions/image-grayscale) folder. The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/functions/image-grayscale/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. + +```rust +use hex; +use std::io::{self, Read}; +use image::{ImageOutputFormat, ImageFormat}; + +fn main() { + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); + let img = image::load_from_memory(&buf).unwrap(); + let filtered = img.grayscale(); + let mut buf = vec![]; + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + }, + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + }, + }; + io::stdout().write_all(&buf).unwrap(); + io::stdout().flush().unwrap(); +} +``` + +You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-grayscale/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/grayscale.wasm ../../ +``` + +> The Netlify function runs [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/pre.sh) upon setting up the serverless environment. It installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. + +The [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. 
Notice [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/pre.sh) for better performance. + +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge'), + [path.join(__dirname, 'grayscale.so')]); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + let buf = Buffer.concat(d); + + res.setHeader('Content-Type', req.headers['image-type']); + res.send(buf); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +} +``` + +That's it. [Deploy the repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and you now have a Netlify Jamstack app with a high-performance Rust and WebAssembly based serverless backend. + +## Example 2: AI inference + +The [second demo](https://60ff7e2d10fe590008db70a9--reverent-hodgkin-dc1f51.netlify.app/) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. + +It is in [the same GitHub repo](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. 
The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. + +```rust +pub fn main() { + // Step 1: Load the TFLite model + let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); + let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); + + // Step 2: Read image from STDIN + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + // Step 3: Resize the input image for the tensorflow model + let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); + + // Step 4: AI inference + let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); + session.add_input("input", &flat_img, &[1, 224, 224, 3]) + .run(); + let res_vec: Vec = session.get_output("MobilenetV1/Predictions/Reshape_1"); + + // Step 5: Find the food label that responds to the highest probability in res_vec + // ... ... + let mut label_lines = labels.lines(); + for _i in 0..max_index { + label_lines.next(); + } + + // Step 6: Generate the output text + let class_name = label_lines.next().unwrap().to_string(); + if max_value > 50 { + println!("It {} a {} in the picture", confidence.to_string(), class_name, class_name); + } else { + println!("It does not appears to be any food item in the picture."); + } +} +``` + +You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-classification/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. 
+ +```bash +cp target/wasm32-wasi/release/classify.wasm ../../ +``` + +Again, the [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/pre.sh) script installs WasmEdge runtime and its Tensorflow dependencies in this application. It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment. + +The [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. + +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge-tensorflow-lite'), + [path.join(__dirname, 'classify.so')], + {env: {'LD_LIBRARY_PATH': __dirname}} + ); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + res.setHeader('Content-Type', `text/plain`); + res.send(d.join('')); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +} +``` + +You can now [deploy your forked repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and have a web app for subject classification. + +Next, it's your turn to develop Rust serverless functions in Netlify using the [netlify-wasm-runtime repo](https://github.com/second-state/netlify-wasm-runtime) as a template. Looking forward to your great work. 
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md new file mode 100644 index 000000000..e41be4cd4 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md @@ -0,0 +1,18 @@ +--- +sidebar_position: 3 +--- + +# Second State Functions + +Second State Functions, powered by WasmEdge, supports the Rust language as a first-class citizen. + +It can + +* [Handle text-based input and output](https://www.secondstate.io/articles/getting-started-with-function-as-a-service-in-rust/) +* [Use binary data as function input and output](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) +* [Mix bytes and strings in function argument and return value](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) +* [Use webhooks as function input and output](https://www.secondstate.io/articles/internet-of-functions-webhooks/) +* [Access internet resources via a `http_proxy` API](https://www.secondstate.io/articles/internet-of-functions-http-proxy/) +* [Run TensorFlow models at native speed via the WasmEdge TensorFlow API](https://www.secondstate.io/articles/wasi-tensorflow/) + +Check out the [Second State Functions](https://www.secondstate.io/faas/) website for more tutorials.
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/tencent.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/tencent.md new file mode 100644 index 000000000..9937f7149 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/tencent.md @@ -0,0 +1,11 @@ +--- +sidebar_position: 4 +--- + +# WebAssembly serverless functions on Tencent Cloud + +As the main users of Tencent Cloud are from China, the tutorial is [written in Chinese](https://my.oschina.net/u/4532842/blog/5172639). + +We also provide a code template for deploying serverless WebAssembly functions on Tencent Cloud; please check out [the tencent-scf-wasm-runtime repo](https://github.com/second-state/tencent-scf-wasm-runtime). + +Fork the repo and start writing your own Rust functions. diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/vercel.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/vercel.md new file mode 100644 index 000000000..21858e0b2 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/vercel.md @@ -0,0 +1,191 @@ +--- +sidebar_position: 5 +--- + +# Rust and WebAssembly Serverless functions in Vercel + +In this article, we will show you two serverless functions in Rust and WasmEdge deployed on Vercel. One is the image processing function, the other one is the TensorFlow inference function. + +> For more insights on why WasmEdge on Vercel, please refer to the article [Rust and WebAssembly Serverless Functions in Vercel](https://www.secondstate.io/articles/vercel-wasmedge-webassembly-rust/). + +## Prerequisite + +Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode.
+ +```bash +rustup target add wasm32-wasi +``` + +The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on Vercel. We will assume that you already have the basic knowledge of how to work with Vercel. + +## Example 1: Image processing + +Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white. A [live demo](https://vercel-wasm-runtime.vercel.app/) deployed on Vercel is available. + +Fork the [demo application’s GitHub repo](https://github.com/second-state/vercel-wasm-runtime) to get started. To deploy the application on Vercel, just [import the Github repo](https://vercel.com/docs/git#deploying-a-git-repository) from [Vercel for Github](https://vercel.com/docs/git/vercel-for-github) web page. + +This repo is a standard Next.js application for the Vercel platform. The backend serverless function is in the [`api/functions/image_grayscale`](https://github.com/second-state/vercel-wasm-runtime/tree/main/api/functions/image-grayscale) folder. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/functions/image-grayscale/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. 
+ +```rust +use hex; +use std::io::{self, Read}; +use image::{ImageOutputFormat, ImageFormat}; + +fn main() { + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); + let img = image::load_from_memory(&buf).unwrap(); + let filtered = img.grayscale(); + let mut buf = vec![]; + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + }, + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + }, + }; + io::stdout().write_all(&buf).unwrap(); + io::stdout().flush().unwrap(); +} +``` + +You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-grayscale/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/grayscale.wasm ../../ +``` + +> Vercel runs [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/pre.sh) upon setting up the serverless environment. It installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. + +The [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/hello.js) file conforms Vercel serverless specification. It loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/pre.sh) for better performance. 
+ +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge'), + [path.join(__dirname, 'grayscale.so')]); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + let buf = Buffer.concat(d); + + res.setHeader('Content-Type', req.headers['image-type']); + res.send(buf); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +} +``` + +That's it. [Deploy the repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and you now have a Vercel Jamstack app with a high-performance Rust and WebAssembly based serverless backend. + +## Example 2: AI inference + +The [second demo](https://vercel-wasm-runtime.vercel.app/) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. + +It is in [the same GitHub repo](https://github.com/second-state/vercel-wasm-runtime) as the previous example but in the `tensorflow` branch. Note: when you [import this GitHub repo](https://vercel.com/docs/git#deploying-a-git-repository) on the Vercel website, it will create a [preview URL](https://vercel.com/docs/platform/deployments#preview) for each branch. The `tensorflow` branch would have its own deployment URL. + +The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/vercel-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. 
It utilizes the WasmEdge Tensorflow API to run the AI inference. + +```rust +pub fn main() { + // Step 1: Load the TFLite model + let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); + let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); + + // Step 2: Read image from STDIN + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + // Step 3: Resize the input image for the tensorflow model + let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); + + // Step 4: AI inference + let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); + session.add_input("input", &flat_img, &[1, 224, 224, 3]) + .run(); + let res_vec: Vec = session.get_output("MobilenetV1/Predictions/Reshape_1"); + + // Step 5: Find the food label that responds to the highest probability in res_vec + // ... ... + let mut label_lines = labels.lines(); + for _i in 0..max_index { + label_lines.next(); + } + + // Step 6: Generate the output text + let class_name = label_lines.next().unwrap().to_string(); + if max_value > 50 { + println!("It {} a {} in the picture", confidence.to_string(), class_name, class_name); + } else { + println!("It does not appears to be any food item in the picture."); + } +} +``` + +You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-classification/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/classify.wasm ../../ +``` + +Again, the [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/pre.sh) script installs WasmEdge runtime and its Tensorflow dependencies in this application. 
It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment. + +The [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/hello.js) file conforms Vercel serverless specification. It loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. + +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge-tensorflow-lite'), + [path.join(__dirname, 'classify.so')], + {env: {'LD_LIBRARY_PATH': __dirname}} + ); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + res.setHeader('Content-Type', `text/plain`); + res.send(d.join('')); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +} +``` + +You can now [deploy your forked repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and have a web app for subject classification. + +Next, it's your turn to use [the vercel-wasm-runtime repo](https://github.com/second-state/vercel-wasm-runtime) as a template to develop your own Rust serverless functions in Vercel. Looking forward to your great work. 
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/wasm-smart-devices.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/wasm-smart-devices.md index a69dbfeb1..17cd9ad77 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/wasm-smart-devices.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/wasm-smart-devices.md @@ -1,5 +1,5 @@ --- -sidebar_position: 3 +sidebar_position: 4 --- # WasmEdge On Smart Devices diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md new file mode 100644 index 000000000..05036703a --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md @@ -0,0 +1,101 @@ +--- +sidebar_position: 9 +--- + +# A simple WebAssembly example + +In this article, I will show you how to build a container image for a WebAssembly application. It can then be started and managed by Kubernetes ecosystem tools, such as CRI-O, Docker, crun, and Kubernetes. + +## Prerequisites + +> If you simply want a wasm bytecode file to test as a container image, you can skip the building process and just [download the wasm file here](https://github.com/second-state/wasm-learning/blob/master/cli/wasi/wasi_example_main.wasm). + +If you have not done so already, follow these simple instructions to [install Rust](https://www.rust-lang.org/tools/install). + +## Download example code + +```bash +git clone https://github.com/second-state/wasm-learning +cd wasm-learning/cli/wasi +``` + +## Build the WASM bytecode + +```bash +rustup target add wasm32-wasi +cargo build --target wasm32-wasi --release +``` + +The wasm bytecode application is in the `target/wasm32-wasi/release/wasi_example_main.wasm` file. You can now publish and use it as a container image. 
+ +## Apply executable permission on the Wasm bytecode + +```bash +chmod +x target/wasm32-wasi/release/wasi_example_main.wasm +``` + +## Create Dockerfile + +Create a file called `Dockerfile` in the `target/wasm32-wasi/release/` folder with the following content: + +```dockerfile +FROM scratch +ADD wasi_example_main.wasm / +CMD ["/wasi_example_main.wasm"] +``` + +## Create container image with annotations + +> Please note that adding self-defined annotation is still a new feature in buildah. + +The `crun` container runtime can start the above WebAssembly-based container image. But it requires the `module.wasm.image/variant=compat-smart` annotation on the container image to indicate that it is a WebAssembly application without a guest OS. You can find the details in [Official crun repo](https://github.com/containers/crun/blob/main/docs/wasm-wasi-example.md). + +To add `module.wasm.image/variant=compat-smart` annotation in the container image, you will need the latest [buildah](https://buildah.io/). Currently, Docker does not support this feature. Please follow [the install instructions of buildah](https://github.com/containers/buildah/blob/main/install.md) to build the latest buildah binary. + +### Build and install the latest buildah on Ubuntu + +On Ubuntu zesty and xenial, use these commands to prepare for buildah. 
+ +```bash +sudo apt-get -y install software-properties-common + +export OS="xUbuntu_20.04" +sudo bash -c "echo \"deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/ /\" > /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list" +sudo bash -c "curl -L https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/Release.key | apt-key add -" + +sudo add-apt-repository -y ppa:alexlarsson/flatpak +sudo apt-get -y -qq update +sudo apt-get -y install bats git libapparmor-dev libdevmapper-dev libglib2.0-dev libgpgme-dev libseccomp-dev libselinux1-dev skopeo-containers go-md2man containers-common +sudo apt-get -y install golang-1.16 make +``` + +Then, follow these steps to build and install buildah on Ubuntu. + +```bash +mkdir -p ~/buildah +cd ~/buildah +export GOPATH=`pwd` +git clone https://github.com/containers/buildah ./src/github.com/containers/buildah +cd ./src/github.com/containers/buildah +PATH=/usr/lib/go-1.16/bin:$PATH make +cp bin/buildah /usr/bin/buildah +buildah --help +``` + +### Create and publish a container image with buildah + +In the `target/wasm32-wasi/release/` folder, do the following. + +```bash +$ sudo buildah build --annotation "module.wasm.image/variant=compat-smart" -t wasm-wasi-example . +# make sure docker is install and running +# systemctl status docker +# to make sure regular user can use docker +# sudo usermod -aG docker $USER +# newgrp docker + +# You may need to use docker login to create the `~/.docker/config.json` for auth. +$ sudo buildah push --authfile ~/.docker/config.json wasm-wasi-example docker://docker.io/wasmedge/example-wasi:latest +``` + +That's it! Now you can try to run it in [CRI-O](../cri/crio.md#run-a-simple-webassembly-app) or [Kubernetes](../kubernetes/kubernetes-crio.md#a-simple-webassembly-app)! 
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/wasmedge/comparison.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/wasmedge/comparison.md new file mode 100644 index 000000000..23c93a001 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/wasmedge/comparison.md @@ -0,0 +1,29 @@ +--- +sidebar_position: 5 +--- + +# Comparison + +## What's the relationship between WebAssembly and Docker? + +Check out our infographic [WebAssembly vs. Docker](https://wasmedge.org/wasm_docker/). WebAssembly runs side by side with Docker in cloud native and edge native applications. + +## What's the difference between Native clients (NaCl), Application runtimes, and WebAssembly? + +We created a handy table for the comparison. + +| | NaCl | Application runtimes (e.g. Node & Python) | Docker-like container | WebAssembly | +| --- | --- | --- | --- | --- | +| Performance | Great | Poor | OK | Great | +| Resource footprint | Great | Poor | Poor | Great | +| Isolation | Poor | OK | OK | Great | +| Safety | Poor | OK | OK | Great | +| Portability | Poor | Great | OK | Great | +| Security | Poor | OK | OK | Great | +| Language and framework choice | N/A | N/A | Great | OK | +| Ease of use | OK | Great | Great | OK | +| Manageability | Poor | Poor | Great | Great | + +## What's the difference between WebAssembly and eBPF? + +`eBPF` is the bytecode format for a Linux kernel space VM that is suitable for network or security related tasks. WebAssembly is the bytecode format for a user space VM that is suited for business applications. [See details here](https://medium.com/codex/ebpf-and-webassembly-whose-vm-reigns-supreme-c2861ce08f89).
\ No newline at end of file From 95b6fb1ddb4c80a0ba8ccd2cfa3816e5293dd3da Mon Sep 17 00:00:00 2001 From: Adithya Krishna Date: Tue, 8 Aug 2023 12:54:49 +0530 Subject: [PATCH 02/11] Updated DOcs Signed-off-by: Adithya Krishna --- docs/embed/c++/intro.md | 5 +- docs/embed/use-case/mesh/dapr.md | 14 +- docs/embed/use-case/mesh/eventmesh.md | 2 +- docs/embed/use-case/reactr.md | 37 +- docs/embed/use-case/serverless/aws.md | 32 +- docs/embed/use-case/serverless/netlify.md | 16 +- docs/embed/use-case/serverless/secondstate.md | 12 +- docs/embed/use-case/serverless/vercel.md | 16 +- docs/start/wasmedge/comparison.md | 24 +- docu.js | 453 ------------------ .../current/embed/c++/intro.md | 5 +- .../current/embed/use-case/mesh/dapr.md | 14 +- .../current/embed/use-case/mesh/eventmesh.md | 2 +- .../current/embed/use-case/reactr.md | 37 +- .../current/embed/use-case/serverless/aws.md | 32 +- .../embed/use-case/serverless/netlify.md | 16 +- .../embed/use-case/serverless/secondstate.md | 12 +- .../embed/use-case/serverless/vercel.md | 16 +- .../current/start/wasmedge/comparison.md | 24 +- 19 files changed, 152 insertions(+), 617 deletions(-) delete mode 100644 docu.js diff --git a/docs/embed/c++/intro.md b/docs/embed/c++/intro.md index e6b7af9f5..3904b6a7a 100644 --- a/docs/embed/c++/intro.md +++ b/docs/embed/c++/intro.md @@ -46,8 +46,7 @@ int main(int argc, char** argv) { } ``` -You can use the -I flag to specify the include directories and the -L and -l flags to specify the library directories and library names, respectively. -Then you can compile the code and run: ( the 40th fibonacci number is 102334155) +You can use the -I flag to specify the include directories and the -L and -l flags to specify the library directories and library names, respectively. 
Then you can compile the code and run: ( the 40th fibonacci number is 102334155) ```bash gcc example.cpp -x c++ -I/path/to/wasmedge/include -L/path/to/wasmedge/lib -lwasmedge -o example @@ -96,4 +95,4 @@ int main(int argc, const char* argv[]) { } ``` -In this example, the wasmedge_vm_create_aot function is used to create a wasmedge_vm_context object in AOT mode, which is then passed as the second argument to the wasmedge_vm_run_wasm_from_file function to execute the Wasm module in AOT mode. \ No newline at end of file +In this example, the wasmedge_vm_create_aot function is used to create a wasmedge_vm_context object in AOT mode, which is then passed as the second argument to the wasmedge_vm_run_wasm_from_file function to execute the Wasm module in AOT mode. diff --git a/docs/embed/use-case/mesh/dapr.md b/docs/embed/use-case/mesh/dapr.md index 4adf085d8..6feaf448b 100644 --- a/docs/embed/use-case/mesh/dapr.md +++ b/docs/embed/use-case/mesh/dapr.md @@ -6,8 +6,8 @@ sidebar_position: 1 In this article, I will demonstrate how to use WasmEdge as a sidecar application runtime for Dapr. There are two ways to do this: -* **Standalone WasmEdge** is the **recommended approach** is to write a microservice using [Rust](../../../write_wasm/rust/networking-nonblocking.md) or [JavaScript](../../../write_wasm/js/networking.md), and run it in WasmEdge. The WasmEdge application serves web requests and communicates with the sidecar via sockets using the Dapr API. In this case, we can [run WasmEdge as a managed container in k8s](../../kubernetes/quickstart.md). -* Alternatively, Embedded WasmEdge is to create a simple microservice in Rust or Go to listen for web requests and communicate with the Dapr sidecar. It passes the request data to a WasmEdge runtime for processing. The business logic of the microservice is a WebAssembly function created and deployed by an application developer. 
+- **Standalone WasmEdge** is the **recommended approach**: write a microservice using [Rust](../../../write_wasm/rust/networking-nonblocking.md) or [JavaScript](../../../write_wasm/js/networking.md), and run it in WasmEdge. The WasmEdge application serves web requests and communicates with the sidecar via sockets using the Dapr API. In this case, we can [run WasmEdge as a managed container in k8s](../../kubernetes/quickstart.md). +- Alternatively, Embedded WasmEdge is to create a simple microservice in Rust or Go to listen for web requests and communicate with the Dapr sidecar. It passes the request data to a WasmEdge runtime for processing. The business logic of the microservice is a WebAssembly function created and deployed by an application developer. > While the first approach (running the entire microservice in WasmEdge) is much preferred, we are still working on a fully fledged Dapr SDKs for WasmEdge. You can track their progress in GitHub issues -- [Rust](https://github.com/WasmEdge/WasmEdge/issues/1571) and [JavaScript](https://github.com/WasmEdge/WasmEdge/issues/1572). @@ -19,13 +19,13 @@ Fork or clone the demo application from Github. You can use this repo as your ow ```bash git clone https://github.com/second-state/dapr-wasm -```` +``` The demo has 4 Dapr sidecar applications. The [web-port](https://github.com/second-state/dapr-wasm/tree/main/web-port) project provides a public web service for a static HTML page. This is the application’s UI. From the static HTML page, the user can select a microservice to turn an input image into grayscale. All 3 microsoervices below perform the same function. They are just implemented using different approaches. -* **Standalone WasmEdge approach:** The [image-api-wasi-socket-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-wasi-socket-rs) project provides a standalone WasmEdge sidecar microservice that takes the input image and returns the grayscale image.
The microservice is written in Rust and compiled into WebAssembly bytecode to run in WasmEdge. -* Embedded WasmEdge approach #1: The [image-api-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) project provides a simple Rust-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. -* Embedded WasmEdge approach #2: The [image-api-go](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) project provides a simple Go-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. +- **Standalone WasmEdge approach:** The [image-api-wasi-socket-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-wasi-socket-rs) project provides a standalone WasmEdge sidecar microservice that takes the input image and returns the grayscale image. The microservice is written in Rust and compiled into WebAssembly bytecode to run in WasmEdge. +- Embedded WasmEdge approach #1: The [image-api-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) project provides a simple Rust-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. +- Embedded WasmEdge approach #2: The [image-api-go](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) project provides a simple Go-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. You can follow the instructions in the [README](https://github.com/second-state/dapr-wasm/blob/main/README.md) to start the sidecar services. Here are commands to build the WebAssembly functions and start the sidecar services. 
The first set of commands deploy the static web page service and the standalone WasmEdge service written in Rust. It forms a complete application to turn an input image into grayscale. @@ -98,7 +98,7 @@ fn grayscale(image: &[u8]) -> Vec { if detected.is_err() { return buf; } - + let image_format_detected = detected.unwrap(); let img = image::load_from_memory(&image).unwrap(); let filtered = img.grayscale(); diff --git a/docs/embed/use-case/mesh/eventmesh.md b/docs/embed/use-case/mesh/eventmesh.md index b39b7b31b..9e8dfaaa4 100644 --- a/docs/embed/use-case/mesh/eventmesh.md +++ b/docs/embed/use-case/mesh/eventmesh.md @@ -7,4 +7,4 @@ sidebar_position: 2 :::info Coming Soon or you can [help out](https://github.com/WasmEdge/WasmEdge/issues/632) -::: \ No newline at end of file +::: diff --git a/docs/embed/use-case/reactr.md b/docs/embed/use-case/reactr.md index 2b31bf21a..90864c3e8 100644 --- a/docs/embed/use-case/reactr.md +++ b/docs/embed/use-case/reactr.md @@ -4,26 +4,21 @@ sidebar_position: 7 # Reactr -[Reactr](https://github.com/suborbital/reactr) is a fast, performant function scheduling library written in Go. Reactr is designed to be flexible, with the ability to run embedded in your Go applications and first-class support for WebAssembly. -Taking advantage of Go's superior concurrency capabilities, Reactr can manage and execute hundreds of WebAssembly runtime instances all at once, making a great framework for server-side applications. +[Reactr](https://github.com/suborbital/reactr) is a fast, performant function scheduling library written in Go. Reactr is designed to be flexible, with the ability to run embedded in your Go applications and first-class support for WebAssembly. Taking advantage of Go's superior concurrency capabilities, Reactr can manage and execute hundreds of WebAssembly runtime instances all at once, making a great framework for server-side applications. 
-Reactr allows you to run WebAssembly functions in Go, so does the [WasmEdge Go SDK](../../../sdk/go.md). -The unique feature of Reactr is that it provides a rich set of host functions in Go, which support access to networks and databases etc. Reactr then provides Rust (and Swift / AssemblyScript) APIs to call those host functions from within the WebAssembly function. +Reactr allows you to run WebAssembly functions in Go, so does the [WasmEdge Go SDK](../../../sdk/go.md). The unique feature of Reactr is that it provides a rich set of host functions in Go, which support access to networks and databases etc. Reactr then provides Rust (and Swift / AssemblyScript) APIs to call those host functions from within the WebAssembly function. -In this article, we will show you how to use WasmEdge together with Reactr to take advantage of the best of both worlds. WasmEdge is the [fastest and most extensible WebAssembly runtime](../../../features.md). -It is also the fastest in [Reactr's official test suite](https://github.com/suborbital/reactr/runs/4476074960?check_suite_focus=true). -We will show you how to run Rust functions compiled to WebAssembly as well as JavaScript programs in WasmEdge and Reactr. +In this article, we will show you how to use WasmEdge together with Reactr to take advantage of the best of both worlds. WasmEdge is the [fastest and most extensible WebAssembly runtime](../../../features.md). It is also the fastest in [Reactr's official test suite](https://github.com/suborbital/reactr/runs/4476074960?check_suite_focus=true). We will show you how to run Rust functions compiled to WebAssembly as well as JavaScript programs in WasmEdge and Reactr. > WasmEdge provides [advanced support for JavaScript](../../../write_wasm/js.md) including [mixing Rust with JavaScript](../../../write_wasm/js/rust.md) for improved performance. 
-* [Hello world](#hello-world) -* [Database query](#database-query) -* [Embed JavaScript in Go](#embed-javascript-in-go) +- [Hello world](#hello-world) +- [Database query](#database-query) +- [Embed JavaScript in Go](#embed-javascript-in-go) ## Prerequisites -You need have [Rust](https://www.rust-lang.org/tools/install), [Go](https://go.dev/doc/install), and [WasmEdge](../../../quick_start/install.md) installed on your system. -The GCC compiler (installed via the `build-essential` package) is also needed for WasmEdge. +You need to have [Rust](https://www.rust-lang.org/tools/install), [Go](https://go.dev/doc/install), and [WasmEdge](../../../quick_start/install.md) installed on your system. The GCC compiler (installed via the `build-essential` package) is also needed for WasmEdge. ```bash sudo apt-get update @@ -48,8 +43,7 @@ A simple `hello world` example for Reactr is [available here](https://github.com ### Hello world: Rust function compiled to WebAssembly -Let's first create [a simple Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/hello-echo/src/lib.rs) to echo hello. -The Rust function `HelloEcho::run()` is as follows. It will be exposed to the Go host application through Reactr. +Let's first create [a simple Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/hello-echo/src/lib.rs) to echo hello. The Rust function `HelloEcho::run()` is as follows. It will be exposed to the Go host application through Reactr. ```rust use suborbital::runnable::*; @@ -75,8 +69,7 @@ cd .. ### Hello world: Go host application -Next, lets look into the [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/main.go) that executes the WebAssembly functions. -The `runBundle()` function executes the `run()` function in the `Runnable` struct once.
+Next, let's look into the [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/main.go) that executes the WebAssembly functions. The `runBundle()` function executes the `run()` function in the `Runnable` struct once. ```go func runBundle() { @@ -155,8 +148,7 @@ Leave this running and start another terminal window to interact with this Postg ### Database query: Rust function compiled to WebAssembly -Let's create [a Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/db/rs-db/src/lib.rs) to access the PostgreSQL database. -The Rust function `RsDbtest::run()` is as follows. It will be exposed to the Go host application through Reactr. It uses named queries such as `PGInsertUser` and `PGSelectUserWithUUID` to operate the database. Those queries are defined in the Go host application, and we will see them later. +Let's create [a Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/db/rs-db/src/lib.rs) to access the PostgreSQL database. The Rust function `RsDbtest::run()` is as follows. It will be exposed to the Go host application through Reactr. It uses named queries such as `PGInsertUser` and `PGSelectUserWithUUID` to operate the database. Those queries are defined in the Go host application, and we will see them later. ```rust use suborbital::runnable::*; @@ -223,8 +215,7 @@ cd .. ### Database query: Go host application -The [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/db/main.go) first defines the SQL queries and gives each of them a name. -We will then pass those queries to the Reactr runtime as a configuration. +The [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/db/main.go) first defines the SQL queries and gives each of them a name. We will then pass those queries to the Reactr runtime as a configuration.
```go func main() { @@ -310,8 +301,7 @@ go run -tags wasmedge main.go ## Embed JavaScript in Go -As we mentioned, a key feature of the WasmEdge Runtime is its advanced [JavaScript support](../../../write_wasm/js.md), which allows JavaScript programs to run in lightweight, high-performance, safe, multi-language, and [Kubernetes-managed WasmEdge containers](../../kubernetes.md). -A simple example of embedded JavaScript function in Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs). +As we mentioned, a key feature of the WasmEdge Runtime is its advanced [JavaScript support](../../../write_wasm/js.md), which allows JavaScript programs to run in lightweight, high-performance, safe, multi-language, and [Kubernetes-managed WasmEdge containers](../../kubernetes.md). A simple example of embedded JavaScript function in Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs). ### JavaScript example @@ -363,8 +353,7 @@ The printed result shows the type information of the string in Rust and Go APIs. ### JavaScript example: Feature examples -WasmEdge supports many advanced JavaScript features. For the next step, you could try our [React SSR example](https://github.com/second-state/wasmedge-quickjs/tree/main/example_js/react_ssr) to generate an HTML UI from a Reactr function! -You can just build the `dist/main.js` from the React SSR example, and copy it over to this example folder to see it in action! +WasmEdge supports many advanced JavaScript features. For the next step, you could try our [React SSR example](https://github.com/second-state/wasmedge-quickjs/tree/main/example_js/react_ssr) to generate an HTML UI from a Reactr function! You can just build the `dist/main.js` from the React SSR example, and copy it over to this example folder to see it in action! 
```bash $ cd quickjs diff --git a/docs/embed/use-case/serverless/aws.md b/docs/embed/use-case/serverless/aws.md index aaa628461..c23c56105 100644 --- a/docs/embed/use-case/serverless/aws.md +++ b/docs/embed/use-case/serverless/aws.md @@ -58,7 +58,7 @@ You can use Rust’s `cargo` tool to build the Rust program into WebAssembly byt ```bash cd api/functions/image-grayscale/ -cargo build --release --target wasm32-wasi +cargo build --release --target wasm32-wasi ``` Copy the build artifacts to the `api` folder. @@ -78,8 +78,10 @@ const { spawn } = require('child_process'); const path = require('path'); function _runWasm(reqBody) { - return new Promise(resolve => { - const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [path.join(__dirname, 'grayscale.so')]); + return new Promise((resolve) => { + const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ + path.join(__dirname, 'grayscale.so'), + ]); let d = []; wasmedge.stdout.on('data', (data) => { @@ -100,21 +102,25 @@ function _runWasm(reqBody) { The `exports.handler` part of `hello.js` exports an async function handler, used to handle different events every time the serverless function is called. In this example, we simply process the image by calling the function above and return the result, but more complicated event-handling behavior may be defined based on your need. We also need to return some `Access-Control-Allow` headers to avoid [Cross-Origin Resource Sharing (CORS)](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) errors when calling the serverless function from a browser. You can read more about CORS errors [here](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS/Errors) if you encounter them when replicating our example. 
```javascript -exports.handler = async function(event, context) { - var typedArray = new Uint8Array(event.body.match(/[\da-f]{2}/gi).map(function (h) { - return parseInt(h, 16); - })); +exports.handler = async function (event, context) { + var typedArray = new Uint8Array( + event.body.match(/[\da-f]{2}/gi).map(function (h) { + return parseInt(h, 16); + }), + ); let buf = await _runWasm(typedArray); return { statusCode: 200, headers: { - "Access-Control-Allow-Headers" : "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", - "Access-Control-Allow-Origin": "*", - "Access-Control-Allow-Methods": "DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT" + 'Access-Control-Allow-Headers': + 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token', + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': + 'DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT', }, - body: buf.toString('hex') + body: buf.toString('hex'), }; -} +}; ``` ### Build the Docker image for Lambda deployment @@ -156,7 +162,7 @@ Third, we need to define the default command when we start our container. `CMD [ Docker images built from AWS Lambda's base images can be tested locally following [this guide](https://docs.aws.amazon.com/lambda/latest/dg/images-test.html). Local testing requires [AWS Lambda Runtime Interface Emulator (RIE)](https://github.com/aws/aws-lambda-runtime-interface-emulator), which is already installed in all of AWS Lambda's base images. To test your image, first, start the Docker container by running: ```bash -docker run -p 9000:8080 myfunction:latest +docker run -p 9000:8080 myfunction:latest ``` This command sets a function endpoint on your local machine at `http://localhost:9000/2015-03-31/functions/function/invocations`. 
diff --git a/docs/embed/use-case/serverless/netlify.md b/docs/embed/use-case/serverless/netlify.md index 45894c3b0..0f4b82db2 100644 --- a/docs/embed/use-case/serverless/netlify.md +++ b/docs/embed/use-case/serverless/netlify.md @@ -56,7 +56,7 @@ You can use Rust’s `cargo` tool to build the Rust program into WebAssembly byt ```bash cd api/functions/image-grayscale/ -cargo build --release --target wasm32-wasi +cargo build --release --target wasm32-wasi ``` Copy the build artifacts to the `api` folder. @@ -75,9 +75,9 @@ const { spawn } = require('child_process'); const path = require('path'); module.exports = (req, res) => { - const wasmedge = spawn( - path.join(__dirname, 'wasmedge'), - [path.join(__dirname, 'grayscale.so')]); + const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ + path.join(__dirname, 'grayscale.so'), + ]); let d = []; wasmedge.stdout.on('data', (data) => { @@ -93,7 +93,7 @@ module.exports = (req, res) => { wasmedge.stdin.write(req.body); wasmedge.stdin.end(''); -} +}; ``` That's it. [Deploy the repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and you now have a Netlify Jamstack app with a high-performance Rust and WebAssembly based serverless backend. @@ -102,7 +102,7 @@ That's it. [Deploy the repo to Netlify](https://www.netlify.com/blog/2016/09/29/ The [second demo](https://60ff7e2d10fe590008db70a9--reverent-hodgkin-dc1f51.netlify.app/) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. -It is in [the same GitHub repo](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. 
The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. +It is in [the same GitHub repo](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. ```rust pub fn main() { @@ -166,7 +166,7 @@ module.exports = (req, res) => { const wasmedge = spawn( path.join(__dirname, 'wasmedge-tensorflow-lite'), [path.join(__dirname, 'classify.so')], - {env: {'LD_LIBRARY_PATH': __dirname}} + { env: { LD_LIBRARY_PATH: __dirname } }, ); let d = []; @@ -181,7 +181,7 @@ module.exports = (req, res) => { wasmedge.stdin.write(req.body); wasmedge.stdin.end(''); -} +}; ``` You can now [deploy your forked repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and have a web app for subject classification. 
diff --git a/docs/embed/use-case/serverless/secondstate.md b/docs/embed/use-case/serverless/secondstate.md index e41be4cd4..5cff16996 100644 --- a/docs/embed/use-case/serverless/secondstate.md +++ b/docs/embed/use-case/serverless/secondstate.md @@ -8,11 +8,11 @@ Second State Functions, powered by WasmEdge, supports the Rust language as a fir It could -* [Handle text-based input and output](https://www.secondstate.io/articles/getting-started-with-function-as-a-service-in-rust/) -* [Use Binary data as function input and output](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) -* [Mix bytes and strings in function argument and return value](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) -* [Use webhooks as function input and output](https://www.secondstate.io/articles/internet-of-functions-webhooks/) -* [Access internet resources via a `http_proxy` API](https://www.secondstate.io/articles/internet-of-functions-http-proxy/) -* [Running TensorFlow models at native speed via the WasmEdge TensorFlow API](https://www.secondstate.io/articles/wasi-tensorflow/) +- [Handle text-based input and output](https://www.secondstate.io/articles/getting-started-with-function-as-a-service-in-rust/) +- [Use Binary data as function input and output](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) +- [Mix bytes and strings in function argument and return value](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) +- [Use webhooks as function input and output](https://www.secondstate.io/articles/internet-of-functions-webhooks/) +- [Access internet resources via a `http_proxy` API](https://www.secondstate.io/articles/internet-of-functions-http-proxy/) +- [Running TensorFlow models at native speed via the WasmEdge TensorFlow API](https://www.secondstate.io/articles/wasi-tensorflow/) Check out the [Second State Functions](https://www.secondstate.io/faas/) 
website for more tutorials. diff --git a/docs/embed/use-case/serverless/vercel.md b/docs/embed/use-case/serverless/vercel.md index 21858e0b2..3ef87bd5c 100644 --- a/docs/embed/use-case/serverless/vercel.md +++ b/docs/embed/use-case/serverless/vercel.md @@ -56,7 +56,7 @@ You can use Rust’s `cargo` tool to build the Rust program into WebAssembly byt ```bash cd api/functions/image-grayscale/ -cargo build --release --target wasm32-wasi +cargo build --release --target wasm32-wasi ``` Copy the build artifacts to the `api` folder. @@ -75,9 +75,9 @@ const { spawn } = require('child_process'); const path = require('path'); module.exports = (req, res) => { - const wasmedge = spawn( - path.join(__dirname, 'wasmedge'), - [path.join(__dirname, 'grayscale.so')]); + const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ + path.join(__dirname, 'grayscale.so'), + ]); let d = []; wasmedge.stdout.on('data', (data) => { @@ -93,7 +93,7 @@ module.exports = (req, res) => { wasmedge.stdin.write(req.body); wasmedge.stdin.end(''); -} +}; ``` That's it. [Deploy the repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and you now have a Vercel Jamstack app with a high-performance Rust and WebAssembly based serverless backend. @@ -104,7 +104,7 @@ The [second demo](https://vercel-wasm-runtime.vercel.app/) application allows us It is in [the same GitHub repo](https://github.com/second-state/vercel-wasm-runtime) as the previous example but in the `tensorflow` branch. Note: when you [import this GitHub repo](https://vercel.com/docs/git#deploying-a-git-repository) on the Vercel website, it will create a [preview URL](https://vercel.com/docs/platform/deployments#preview) for each branch. The `tensorflow` branch would have its own deployment URL. 
-The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/vercel-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. +The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/vercel-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. ```rust pub fn main() { @@ -168,7 +168,7 @@ module.exports = (req, res) => { const wasmedge = spawn( path.join(__dirname, 'wasmedge-tensorflow-lite'), [path.join(__dirname, 'classify.so')], - {env: {'LD_LIBRARY_PATH': __dirname}} + { env: { LD_LIBRARY_PATH: __dirname } }, ); let d = []; @@ -183,7 +183,7 @@ module.exports = (req, res) => { wasmedge.stdin.write(req.body); wasmedge.stdin.end(''); -} +}; ``` You can now [deploy your forked repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and have a web app for subject classification. 
diff --git a/docs/start/wasmedge/comparison.md b/docs/start/wasmedge/comparison.md index 23c93a001..20fc78cb4 100644 --- a/docs/start/wasmedge/comparison.md +++ b/docs/start/wasmedge/comparison.md @@ -12,18 +12,18 @@ Check out our infographic [WebAssembly vs. Docker](https://wasmedge.org/wasm_doc We created a handy table for the comparison. -| | NaCl | Application runtimes (eg Node & Python) | Docker-like container | WebAssembly | -| --- | --- | --- | --- | --- | -| Performance | Great | Poor | OK | Great | -| Resource footprint | Great | Poor | Poor | Great | -| Isolation | Poor | OK | OK | Great | -| Safety | Poor | OK | OK | Great | -| Portability | Poor | Great | OK | Great | -| Security | Poor | OK | OK | Great | -| Language and framework choice | N/A | N/A | Great | OK | -| Ease of use | OK | Great | Great | OK | -| Manageability | Poor | Poor | Great | Great | +| | NaCl | Application runtimes (eg Node & Python) | Docker-like container | WebAssembly | +| --- | --- | --- | --- | --- | +| Performance | Great | Poor | OK | Great | +| Resource footprint | Great | Poor | Poor | Great | +| Isolation | Poor | OK | OK | Great | +| Safety | Poor | OK | OK | Great | +| Portability | Poor | Great | OK | Great | +| Security | Poor | OK | OK | Great | +| Language and framework choice | N/A | N/A | Great | OK | +| Ease of use | OK | Great | Great | OK | +| Manageability | Poor | Poor | Great | Great | ## What's the difference between WebAssembly and eBPF? -`eBPF` is the bytecode format for a Linux kernel space VM that is suitable for network or security related tasks. WebAssembly is the bytecode format for a user space VM that is suited for business applications. [See details here](https://medium.com/codex/ebpf-and-webassembly-whose-vm-reigns-supreme-c2861ce08f89). \ No newline at end of file +`eBPF` is the bytecode format for a Linux kernel space VM that is suitable for network or security related tasks. 
WebAssembly is the bytecode format for a user space VM that is suited for business applications. [See details here](https://medium.com/codex/ebpf-and-webassembly-whose-vm-reigns-supreme-c2861ce08f89). diff --git a/docu.js b/docu.js deleted file mode 100644 index 0e17036ed..000000000 --- a/docu.js +++ /dev/null @@ -1,453 +0,0 @@ -// @ts-check - -const translations = require('@verdaccio/crowdin-translations/build/progress_lang.json'); - -const lgnMapping = { - 'de-DE': 'de', - 'pl-PL': 'pl', - 'cs-CZ': 'cs', - 'fr-FR': 'fr', - 'it-IT': 'it', - 'ru-RU': 'ru', - 'vi-VN': 'vi', - 'yo-NG': 'yo', -}; - -// @ts-ignore -const progress = translations; -const limitLngIncluded = 19; -console.log('limit translation is on %s%', limitLngIncluded); -const isDeployPreview = process.env.CONTEXT === 'deploy-preview'; -const isProductionDeployment = process.env.CONTEXT === 'production'; -const filterByProgress = (items) => { - const originLng = Object.keys(translations); - return items.filter((lgn) => { - if (lgn === 'en') { - return true; - } - const _lgn = lgnMapping[lgn] ? lgnMapping[lgn] : lgn; - if (!originLng.includes(_lgn)) { - console.log(`language ${_lgn} excluded, does not exist in origin`); - return false; - } - - if (translations[_lgn].approvalProgress <= limitLngIncluded) { - console.log( - 'language %s is being excluded due does not met limit of translation, current: %s%', - _lgn, - translations[_lgn].approvalProgress - ); - return false; - } - - return true; - }); -}; - -const i18nConfig = { - defaultLocale: 'en', - locales: isDeployPreview - ? 
['en'] - : filterByProgress([ - 'en', - 'cs-CZ', - 'de-DE', - 'es-ES', - 'fr-FR', - 'it-IT', - 'pl-PL', - 'pt-BR', - 'ru-RU', - 'sr-CS', - 'vi-VN', - 'yo-NG', - 'zh-TW', - 'zh-CN', - ]), - localeConfigs: { - en: { label: 'English' }, - 'it-IT': { label: `Italiano (${progress['it'].translationProgress}%)` }, - 'es-ES': { label: `Español (${progress['es-ES'].translationProgress}%)` }, - 'de-DE': { label: `Deutsch (${progress['de'].translationProgress}%)` }, - 'cs-CZ': { label: `Čeština (Česko) (${progress['cs'].translationProgress}%)` }, - 'fr-FR': { label: `Français (${progress['fr'].translationProgress}%)` }, - 'pl-PL': { label: `Polski (Polska) (${progress['pl'].translationProgress}%)` }, - 'pt-BR': { label: `Português (Brasil) (${progress['pt-BR'].translationProgress}%)` }, - 'ru-RU': { label: `Русский (Россия) (${progress['ru'].translationProgress}%)` }, - 'zh-CN': { label: `中文(中国)(${progress['zh-CN'].translationProgress}%)` }, - 'zh-TW': { label: `中文(台灣)(${progress['zh-TW'].translationProgress}%)` }, - 'yo-NG': { label: `Èdè Yorùbá (Nàìjíríà) (${progress['yo'].translationProgress}%)` }, - 'sr-CS': { label: `Српски (Србија) (${progress['sr-CS'].translationProgress}%)` }, - 'vi-VN': { label: `Tiếng Việt (Việt Nam) (${progress['vi'].translationProgress}%)` }, - }, -}; - -const pkgJson = require('./package.json'); - -module.exports = { - title: 'Verdaccio', - tagline: 'A lightweight Node.js private proxy registry', - organizationName: 'verdaccio', - projectName: 'verdaccio', - url: 'https://verdaccio.org', - baseUrl: '/', - onBrokenLinks: 'throw', - onBrokenMarkdownLinks: 'warn', - favicon: 'img/logo/uk/verdaccio-tiny-uk-no-bg.svg', - i18n: i18nConfig, - scripts: ['https://buttons.github.io/buttons.js'], - plugins: [ - 'docusaurus-plugin-sass', - 'docusaurus-plugin-contributors', - isProductionDeployment && - typeof process.env.SENTRY_KEY === 'string' && [ - 'docusaurus-plugin-sentry', - { DSN: process.env.SENTRY_KEY }, - ], - [ - 'docusaurus-plugin-typedoc', - { - 
entryPoints: ['../packages/node-api/src/index.ts'], - tsconfig: '../packages/node-api/tsconfig.build.json', - id: 'api/node-api', - out: 'api/node-api', - // theme: 'default', - excludePrivate: false, - excludeProtected: true, - categorizeByGroup: false, - excludeInternal: true, - sidebar: { - categoryLabel: '@verdaccio/node-api', - // position: 1, - fullNames: true, - }, - }, - ], - [ - 'content-docs', - { - id: 'community', - path: 'community', - routeBasePath: 'community', - sidebarPath: require.resolve('./sidebarsCommunity.js'), - showLastUpdateTime: true, - }, - ], - [ - 'content-docs', - { - id: 'dev', - path: 'dev', - routeBasePath: 'dev', - sidebarPath: require.resolve('./sidebarsDev.js'), - showLastUpdateTime: true, - }, - ], - [ - 'content-docs', - { - id: 'talks', - path: 'talks', - routeBasePath: 'talks', - sidebarPath: require.resolve('./sidebarsTalk.js'), - showLastUpdateTime: true, - }, - ], - [ - 'docusaurus-plugin-typedoc', - { - entryPoints: ['../packages/config/src/index.ts'], - tsconfig: '../packages/config/tsconfig.build.json', - id: 'api/config', - out: 'api/config', - sidebar: { - categoryLabel: '@verdaccio/config', - fullNames: true, - }, - }, - ], - [ - 'docusaurus-plugin-typedoc', - { - entryPoints: ['../packages/ui-components/src/index.ts'], - tsconfig: '../packages/ui-components/tsconfig.build.json', - id: 'api/ui-components', - out: 'api/ui-components', - sidebar: { - categoryLabel: '@verdaccio/ui-components', - fullNames: true, - watch: process.env.TYPEDOC_WATCH, - }, - }, - ], - [ - 'docusaurus-plugin-typedoc', - { - entryPoints: ['../packages/core/core/src/index.ts'], - tsconfig: '../packages/core/core/tsconfig.build.json', - id: 'api/core', - out: 'api/core', - sidebar: { - categoryLabel: '@verdaccio/core', - fullNames: true, - }, - }, - ], - [ - 'docusaurus-plugin-typedoc', - { - entryPoints: ['../packages/core/types/src/types.ts'], - tsconfig: '../packages/core/types/tsconfig.build.json', - id: 'api/types', - out: 'api/types', - 
categorizeByGroup: false, - includeVersion: true, - sidebar: { - categoryLabel: '@verdaccio/types', - fullNames: true, - }, - }, - ], - ], - markdown: { - mermaid: true, - }, - themes: ['@docusaurus/theme-mermaid'], - webpack: { - jsLoader: (isServer) => ({ - loader: require.resolve('esbuild-loader'), - options: { - loader: 'tsx', - format: isServer ? 'cjs' : undefined, - target: isServer ? 'node12' : 'es2017', - }, - }), - }, - customFields: { - description: 'A lightweight Node.js private proxy registry', - }, - themeConfig: { - mermaid: { - theme: { light: 'neutral', dark: 'forest' }, - }, - announcementBar: { - id: 'announcementBar', - content: - 'Help provide humanitarian support to Ukraine refugees!', - isCloseable: false, - backgroundColor: '#1595de', - textColor: '#ffffff', - }, - algolia: { - appId: 'B3TG5CBF5H', - apiKey: 'ed054733cb03418e9af25b7beb82c924', - indexName: 'verdaccio', - contextualSearch: true, - }, - docs: { - sidebar: { - hideable: true, - autoCollapseCategories: true, - }, - }, - navbar: { - title: `Verdaccio - v5.x`, - logo: { - alt: 'Verdaccio Logo', - src: 'img/logo/uk/verdaccio-tiny-uk-no-bg.svg', - }, - items: [ - { - type: 'doc', - docId: 'what-is-verdaccio', - position: 'left', - label: 'Docs', - }, - { - type: 'doc', - docId: 'api/node-api/index', - position: 'left', - label: 'API', - }, - { to: '/blog', label: 'Blog', position: 'left' }, - { - type: 'docsVersionDropdown', - position: 'right', - }, - { - href: 'https://opencollective.com/verdaccio', - label: 'Sponsor us', - position: 'right', - }, - { - href: '/community', - label: 'Community', - position: 'left', - }, - { - href: '/talks', - label: 'Video Talks', - position: 'left', - }, - { - type: 'localeDropdown', - position: 'right', - dropdownItemsAfter: [ - { - href: 'https://crowdin.com/project/verdaccio', - label: 'Help Us Translate', - }, - ], - }, - { - href: 'https://github.com/verdaccio/verdaccio', - position: 'right', - className: 'header-github-link', - 'aria-label': 
'GitHub repository', - }, - { - href: 'https://fosstodon.org/@verdaccio', - position: 'right', - className: 'header-mastodon-link', - 'aria-label': 'Follow us at Fosstodon', - }, - ], - }, - footer: { - style: 'dark', - links: [ - { - title: 'Docs', - items: [ - { - label: 'Getting Started', - to: '/docs/what-is-verdaccio', - }, - { - label: 'Docker', - to: '/docs/docker', - }, - { - label: 'Configuration', - to: '/docs/configuration', - }, - { - label: 'Logos', - to: '/docs/logo', - }, - ], - }, - { - title: 'Community', - items: [ - { - label: 'Stack Overflow', - href: 'https://stackoverflow.com/questions/tagged/verdaccio', - }, - { - label: 'Discord', - href: 'https://discord.gg/7qWJxBf', - }, - { - html: ` - - Mastodon - - `, - }, - ], - }, - { - title: 'More', - items: [ - { - label: 'Blog', - to: '/blog', - }, - { - label: 'GitHub', - href: 'https://github.com/verdaccio/verdaccio', - }, - { - label: 'Mastodon', - href: 'https://fosstodon.org/@verdaccio', - }, - { - html: ` - - Deploys by Netlify - - `, - }, - ], - }, - ], - copyright: `Copyright © ${new Date().getFullYear()} Verdaccio community. 
Built with Docusaurus.`, - }, - colorMode: { - defaultMode: 'light', - disableSwitch: false, - respectPrefersColorScheme: true, - }, - prism: { - theme: require('prism-react-renderer/themes/github'), - darkTheme: require('prism-react-renderer/themes/nightOwl'), - }, - }, - presets: [ - [ - '@docusaurus/preset-classic', - { - docs: { - sidebarPath: require.resolve('./sidebars.js'), - showLastUpdateAuthor: true, - showLastUpdateTime: true, - sidebarCollapsible: true, - remarkPlugins: [[require('@docusaurus/remark-plugin-npm2yarn'), { sync: true }]], - editUrl: ({ locale, docPath }) => { - if (locale !== 'en') { - return `https://crowdin.com/project/verdaccio/${locale}`; - } - return `https://github.com/verdaccio/verdaccio/edit/master/website/docs/${docPath}`; - }, - lastVersion: '5.x', - versions: { - current: { - label: `6.x`, - }, - '5.x': { - label: `5.x (Latest)`, - }, - }, - }, - googleAnalytics: { - // trackingID: 'UA-2527438-21' - trackingID: 'G-PCYM9FYJZT', - }, - gtag: { - trackingID: 'G-PCYM9FYJZT', - }, - blog: { - blogTitle: 'Verdaccio Official Blog', - blogDescription: 'The official Verdaccio Node.js proxy registry blog', - showReadingTime: true, - postsPerPage: 3, - feedOptions: { - type: 'all', - }, - blogSidebarCount: 'ALL', - blogSidebarTitle: 'All our posts', - editUrl: ({ locale, blogDirPath, blogPath }) => { - if (locale !== 'en') { - return `https://crowdin.com/project/verdaccio/${locale}`; - } - return `https://github.com/verdaccio/verdaccio/edit/master/website/${blogDirPath}/${blogPath}`; - }, - }, - theme: { - customCss: require.resolve('./src/css/custom.scss'), - }, - }, - ], - ], -}; \ No newline at end of file diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/c++/intro.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/c++/intro.md index e6b7af9f5..3904b6a7a 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/c++/intro.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/c++/intro.md @@ -46,8 
+46,7 @@ int main(int argc, char** argv) { } ``` -You can use the -I flag to specify the include directories and the -L and -l flags to specify the library directories and library names, respectively. -Then you can compile the code and run: ( the 40th fibonacci number is 102334155) +You can use the -I flag to specify the include directories and the -L and -l flags to specify the library directories and library names, respectively. Then you can compile the code and run: ( the 40th fibonacci number is 102334155) ```bash gcc example.cpp -x c++ -I/path/to/wasmedge/include -L/path/to/wasmedge/lib -lwasmedge -o example @@ -96,4 +95,4 @@ int main(int argc, const char* argv[]) { } ``` -In this example, the wasmedge_vm_create_aot function is used to create a wasmedge_vm_context object in AOT mode, which is then passed as the second argument to the wasmedge_vm_run_wasm_from_file function to execute the Wasm module in AOT mode. \ No newline at end of file +In this example, the wasmedge_vm_create_aot function is used to create a wasmedge_vm_context object in AOT mode, which is then passed as the second argument to the wasmedge_vm_run_wasm_from_file function to execute the Wasm module in AOT mode. diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md index 4adf085d8..6feaf448b 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md @@ -6,8 +6,8 @@ sidebar_position: 1 In this article, I will demonstrate how to use WasmEdge as a sidecar application runtime for Dapr. There are two ways to do this: -* **Standalone WasmEdge** is the **recommended approach** is to write a microservice using [Rust](../../../write_wasm/rust/networking-nonblocking.md) or [JavaScript](../../../write_wasm/js/networking.md), and run it in WasmEdge. 
The WasmEdge application serves web requests and communicates with the sidecar via sockets using the Dapr API. In this case, we can [run WasmEdge as a managed container in k8s](../../kubernetes/quickstart.md). -* Alternatively, Embedded WasmEdge is to create a simple microservice in Rust or Go to listen for web requests and communicate with the Dapr sidecar. It passes the request data to a WasmEdge runtime for processing. The business logic of the microservice is a WebAssembly function created and deployed by an application developer. +- **Standalone WasmEdge** is the **recommended approach** is to write a microservice using [Rust](../../../write_wasm/rust/networking-nonblocking.md) or [JavaScript](../../../write_wasm/js/networking.md), and run it in WasmEdge. The WasmEdge application serves web requests and communicates with the sidecar via sockets using the Dapr API. In this case, we can [run WasmEdge as a managed container in k8s](../../kubernetes/quickstart.md). +- Alternatively, Embedded WasmEdge is to create a simple microservice in Rust or Go to listen for web requests and communicate with the Dapr sidecar. It passes the request data to a WasmEdge runtime for processing. The business logic of the microservice is a WebAssembly function created and deployed by an application developer. > While the first approach (running the entire microservice in WasmEdge) is much preferred, we are still working on a fully fledged Dapr SDKs for WasmEdge. You can track their progress in GitHub issues -- [Rust](https://github.com/WasmEdge/WasmEdge/issues/1571) and [JavaScript](https://github.com/WasmEdge/WasmEdge/issues/1572). @@ -19,13 +19,13 @@ Fork or clone the demo application from Github. You can use this repo as your ow ```bash git clone https://github.com/second-state/dapr-wasm -```` +``` The demo has 4 Dapr sidecar applications. The [web-port](https://github.com/second-state/dapr-wasm/tree/main/web-port) project provides a public web service for a static HTML page. 
This is the application’s UI. From the static HTML page, the user can select a microservice to turn an input image into grayscale. All 3 microsoervices below perform the same function. They are just implemented using different approaches. -* **Standalone WasmEdge approach:** The [image-api-wasi-socket-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-wasi-socket-rs) project provides a standalone WasmEdge sidecar microservice that takes the input image and returns the grayscale image. The microservice is written in Rust and compiled into WebAssembly bytecode to run in WasmEdge. -* Embedded WasmEdge approach #1: The [image-api-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) project provides a simple Rust-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. -* Embedded WasmEdge approach #2: The [image-api-go](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) project provides a simple Go-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. +- **Standalone WasmEdge approach:** The [image-api-wasi-socket-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-wasi-socket-rs) project provides a standalone WasmEdge sidecar microservice that takes the input image and returns the grayscale image. The microservice is written in Rust and compiled into WebAssembly bytecode to run in WasmEdge. +- Embedded WasmEdge approach #1: The [image-api-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) project provides a simple Rust-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. 
+- Embedded WasmEdge approach #2: The [image-api-go](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) project provides a simple Go-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. You can follow the instructions in the [README](https://github.com/second-state/dapr-wasm/blob/main/README.md) to start the sidecar services. Here are commands to build the WebAssembly functions and start the sidecar services. The first set of commands deploy the static web page service and the standalone WasmEdge service written in Rust. It forms a complete application to turn an input image into grayscale. @@ -98,7 +98,7 @@ fn grayscale(image: &[u8]) -> Vec { if detected.is_err() { return buf; } - + let image_format_detected = detected.unwrap(); let img = image::load_from_memory(&image).unwrap(); let filtered = img.grayscale(); diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md index b39b7b31b..9e8dfaaa4 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md @@ -7,4 +7,4 @@ sidebar_position: 2 :::info Coming Soon or you can [help out](https://github.com/WasmEdge/WasmEdge/issues/632) -::: \ No newline at end of file +::: diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md index 2b31bf21a..90864c3e8 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md @@ -4,26 +4,21 @@ sidebar_position: 7 # Reactr -[Reactr](https://github.com/suborbital/reactr) is a fast, performant function scheduling library 
written in Go. Reactr is designed to be flexible, with the ability to run embedded in your Go applications and first-class support for WebAssembly. -Taking advantage of Go's superior concurrency capabilities, Reactr can manage and execute hundreds of WebAssembly runtime instances all at once, making a great framework for server-side applications. +[Reactr](https://github.com/suborbital/reactr) is a fast, performant function scheduling library written in Go. Reactr is designed to be flexible, with the ability to run embedded in your Go applications and first-class support for WebAssembly. Taking advantage of Go's superior concurrency capabilities, Reactr can manage and execute hundreds of WebAssembly runtime instances all at once, making a great framework for server-side applications. -Reactr allows you to run WebAssembly functions in Go, so does the [WasmEdge Go SDK](../../../sdk/go.md). -The unique feature of Reactr is that it provides a rich set of host functions in Go, which support access to networks and databases etc. Reactr then provides Rust (and Swift / AssemblyScript) APIs to call those host functions from within the WebAssembly function. +Reactr allows you to run WebAssembly functions in Go, so does the [WasmEdge Go SDK](../../../sdk/go.md). The unique feature of Reactr is that it provides a rich set of host functions in Go, which support access to networks and databases etc. Reactr then provides Rust (and Swift / AssemblyScript) APIs to call those host functions from within the WebAssembly function. -In this article, we will show you how to use WasmEdge together with Reactr to take advantage of the best of both worlds. WasmEdge is the [fastest and most extensible WebAssembly runtime](../../../features.md). -It is also the fastest in [Reactr's official test suite](https://github.com/suborbital/reactr/runs/4476074960?check_suite_focus=true). 
-We will show you how to run Rust functions compiled to WebAssembly as well as JavaScript programs in WasmEdge and Reactr. +In this article, we will show you how to use WasmEdge together with Reactr to take advantage of the best of both worlds. WasmEdge is the [fastest and most extensible WebAssembly runtime](../../../features.md). It is also the fastest in [Reactr's official test suite](https://github.com/suborbital/reactr/runs/4476074960?check_suite_focus=true). We will show you how to run Rust functions compiled to WebAssembly as well as JavaScript programs in WasmEdge and Reactr. > WasmEdge provides [advanced support for JavaScript](../../../write_wasm/js.md) including [mixing Rust with JavaScript](../../../write_wasm/js/rust.md) for improved performance. -* [Hello world](#hello-world) -* [Database query](#database-query) -* [Embed JavaScript in Go](#embed-javascript-in-go) +- [Hello world](#hello-world) +- [Database query](#database-query) +- [Embed JavaScript in Go](#embed-javascript-in-go) ## Prerequisites -You need have [Rust](https://www.rust-lang.org/tools/install), [Go](https://go.dev/doc/install), and [WasmEdge](../../../quick_start/install.md) installed on your system. -The GCC compiler (installed via the `build-essential` package) is also needed for WasmEdge. +You need have [Rust](https://www.rust-lang.org/tools/install), [Go](https://go.dev/doc/install), and [WasmEdge](../../../quick_start/install.md) installed on your system. The GCC compiler (installed via the `build-essential` package) is also needed for WasmEdge. ```bash sudo apt-get update @@ -48,8 +43,7 @@ A simple `hello world` example for Reactr is [available here](https://github.com ### Hello world: Rust function compiled to WebAssembly -Let's first create [a simple Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/hello-echo/src/lib.rs) to echo hello. -The Rust function `HelloEcho::run()` is as follows. 
It will be exposed to the Go host application through Reactr. +Let's first create [a simple Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/hello-echo/src/lib.rs) to echo hello. The Rust function `HelloEcho::run()` is as follows. It will be exposed to the Go host application through Reactr. ```rust use suborbital::runnable::*; @@ -75,8 +69,7 @@ cd .. ### Hello world: Go host application -Next, lets look into the [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/main.go) that executes the WebAssembly functions. -The `runBundle()` function executes the `run()` function in the `Runnable` struct once. +Next, lets look into the [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/main.go) that executes the WebAssembly functions. The `runBundle()` function executes the `run()` function in the `Runnable` struct once. ```go func runBundle() { @@ -155,8 +148,7 @@ Leave this running and start another terminal window to interact with this Postg ### Database query: Rust function compiled to WebAssembly -Let's create [a Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/db/rs-db/src/lib.rs) to access the PostgreSQL database. -The Rust function `RsDbtest::run()` is as follows. It will be exposed to the Go host application through Reactr. It uses named queries such as `PGInsertUser` and `PGSelectUserWithUUID` to operate the database. Those queries are defined in the Go host application, and we will see them later. +Let's create [a Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/db/rs-db/src/lib.rs) to access the PostgreSQL database. The Rust function `RsDbtest::run()` is as follows. It will be exposed to the Go host application through Reactr. It uses named queries such as `PGInsertUser` and `PGSelectUserWithUUID` to operate the database. 
Those queries are defined in the Go host application, and we will see them later. ```rust use suborbital::runnable::*; @@ -223,8 +215,7 @@ cd .. ### Database query: Go host application -The [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/db/main.go) first defines the SQL queries and gives each of them a name. -We will then pass those queries to the Reactr runtime as a configuration. +The [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/db/main.go) first defines the SQL queries and gives each of them a name. We will then pass those queries to the Reactr runtime as a configuration. ```go func main() { @@ -310,8 +301,7 @@ go run -tags wasmedge main.go ## Embed JavaScript in Go -As we mentioned, a key feature of the WasmEdge Runtime is its advanced [JavaScript support](../../../write_wasm/js.md), which allows JavaScript programs to run in lightweight, high-performance, safe, multi-language, and [Kubernetes-managed WasmEdge containers](../../kubernetes.md). -A simple example of embedded JavaScript function in Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs). +As we mentioned, a key feature of the WasmEdge Runtime is its advanced [JavaScript support](../../../write_wasm/js.md), which allows JavaScript programs to run in lightweight, high-performance, safe, multi-language, and [Kubernetes-managed WasmEdge containers](../../kubernetes.md). A simple example of embedded JavaScript function in Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs). ### JavaScript example @@ -363,8 +353,7 @@ The printed result shows the type information of the string in Rust and Go APIs. ### JavaScript example: Feature examples -WasmEdge supports many advanced JavaScript features. 
For the next step, you could try our [React SSR example](https://github.com/second-state/wasmedge-quickjs/tree/main/example_js/react_ssr) to generate an HTML UI from a Reactr function! -You can just build the `dist/main.js` from the React SSR example, and copy it over to this example folder to see it in action! +WasmEdge supports many advanced JavaScript features. For the next step, you could try our [React SSR example](https://github.com/second-state/wasmedge-quickjs/tree/main/example_js/react_ssr) to generate an HTML UI from a Reactr function! You can just build the `dist/main.js` from the React SSR example, and copy it over to this example folder to see it in action! ```bash $ cd quickjs diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/aws.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/aws.md index aaa628461..c23c56105 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/aws.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/aws.md @@ -58,7 +58,7 @@ You can use Rust’s `cargo` tool to build the Rust program into WebAssembly byt ```bash cd api/functions/image-grayscale/ -cargo build --release --target wasm32-wasi +cargo build --release --target wasm32-wasi ``` Copy the build artifacts to the `api` folder. 
@@ -78,8 +78,10 @@ const { spawn } = require('child_process'); const path = require('path'); function _runWasm(reqBody) { - return new Promise(resolve => { - const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [path.join(__dirname, 'grayscale.so')]); + return new Promise((resolve) => { + const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ + path.join(__dirname, 'grayscale.so'), + ]); let d = []; wasmedge.stdout.on('data', (data) => { @@ -100,21 +102,25 @@ function _runWasm(reqBody) { The `exports.handler` part of `hello.js` exports an async function handler, used to handle different events every time the serverless function is called. In this example, we simply process the image by calling the function above and return the result, but more complicated event-handling behavior may be defined based on your need. We also need to return some `Access-Control-Allow` headers to avoid [Cross-Origin Resource Sharing (CORS)](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) errors when calling the serverless function from a browser. You can read more about CORS errors [here](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS/Errors) if you encounter them when replicating our example. 
```javascript -exports.handler = async function(event, context) { - var typedArray = new Uint8Array(event.body.match(/[\da-f]{2}/gi).map(function (h) { - return parseInt(h, 16); - })); +exports.handler = async function (event, context) { + var typedArray = new Uint8Array( + event.body.match(/[\da-f]{2}/gi).map(function (h) { + return parseInt(h, 16); + }), + ); let buf = await _runWasm(typedArray); return { statusCode: 200, headers: { - "Access-Control-Allow-Headers" : "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", - "Access-Control-Allow-Origin": "*", - "Access-Control-Allow-Methods": "DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT" + 'Access-Control-Allow-Headers': + 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token', + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': + 'DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT', }, - body: buf.toString('hex') + body: buf.toString('hex'), }; -} +}; ``` ### Build the Docker image for Lambda deployment @@ -156,7 +162,7 @@ Third, we need to define the default command when we start our container. `CMD [ Docker images built from AWS Lambda's base images can be tested locally following [this guide](https://docs.aws.amazon.com/lambda/latest/dg/images-test.html). Local testing requires [AWS Lambda Runtime Interface Emulator (RIE)](https://github.com/aws/aws-lambda-runtime-interface-emulator), which is already installed in all of AWS Lambda's base images. To test your image, first, start the Docker container by running: ```bash -docker run -p 9000:8080 myfunction:latest +docker run -p 9000:8080 myfunction:latest ``` This command sets a function endpoint on your local machine at `http://localhost:9000/2015-03-31/functions/function/invocations`. 
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/netlify.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/netlify.md index 45894c3b0..0f4b82db2 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/netlify.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/netlify.md @@ -56,7 +56,7 @@ You can use Rust’s `cargo` tool to build the Rust program into WebAssembly byt ```bash cd api/functions/image-grayscale/ -cargo build --release --target wasm32-wasi +cargo build --release --target wasm32-wasi ``` Copy the build artifacts to the `api` folder. @@ -75,9 +75,9 @@ const { spawn } = require('child_process'); const path = require('path'); module.exports = (req, res) => { - const wasmedge = spawn( - path.join(__dirname, 'wasmedge'), - [path.join(__dirname, 'grayscale.so')]); + const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ + path.join(__dirname, 'grayscale.so'), + ]); let d = []; wasmedge.stdout.on('data', (data) => { @@ -93,7 +93,7 @@ module.exports = (req, res) => { wasmedge.stdin.write(req.body); wasmedge.stdin.end(''); -} +}; ``` That's it. [Deploy the repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and you now have a Netlify Jamstack app with a high-performance Rust and WebAssembly based serverless backend. @@ -102,7 +102,7 @@ That's it. [Deploy the repo to Netlify](https://www.netlify.com/blog/2016/09/29/ The [second demo](https://60ff7e2d10fe590008db70a9--reverent-hodgkin-dc1f51.netlify.app/) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. -It is in [the same GitHub repo](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. 
The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. +It is in [the same GitHub repo](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. ```rust pub fn main() { @@ -166,7 +166,7 @@ module.exports = (req, res) => { const wasmedge = spawn( path.join(__dirname, 'wasmedge-tensorflow-lite'), [path.join(__dirname, 'classify.so')], - {env: {'LD_LIBRARY_PATH': __dirname}} + { env: { LD_LIBRARY_PATH: __dirname } }, ); let d = []; @@ -181,7 +181,7 @@ module.exports = (req, res) => { wasmedge.stdin.write(req.body); wasmedge.stdin.end(''); -} +}; ``` You can now [deploy your forked repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and have a web app for subject classification. 
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md index e41be4cd4..5cff16996 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md @@ -8,11 +8,11 @@ Second State Functions, powered by WasmEdge, supports the Rust language as a fir It could -* [Handle text-based input and output](https://www.secondstate.io/articles/getting-started-with-function-as-a-service-in-rust/) -* [Use Binary data as function input and output](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) -* [Mix bytes and strings in function argument and return value](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) -* [Use webhooks as function input and output](https://www.secondstate.io/articles/internet-of-functions-webhooks/) -* [Access internet resources via a `http_proxy` API](https://www.secondstate.io/articles/internet-of-functions-http-proxy/) -* [Running TensorFlow models at native speed via the WasmEdge TensorFlow API](https://www.secondstate.io/articles/wasi-tensorflow/) +- [Handle text-based input and output](https://www.secondstate.io/articles/getting-started-with-function-as-a-service-in-rust/) +- [Use Binary data as function input and output](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) +- [Mix bytes and strings in function argument and return value](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) +- [Use webhooks as function input and output](https://www.secondstate.io/articles/internet-of-functions-webhooks/) +- [Access internet resources via a `http_proxy` API](https://www.secondstate.io/articles/internet-of-functions-http-proxy/) +- [Running TensorFlow models at native 
speed via the WasmEdge TensorFlow API](https://www.secondstate.io/articles/wasi-tensorflow/) Check out the [Second State Functions](https://www.secondstate.io/faas/) website for more tutorials. diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/vercel.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/vercel.md index 21858e0b2..3ef87bd5c 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/vercel.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/vercel.md @@ -56,7 +56,7 @@ You can use Rust’s `cargo` tool to build the Rust program into WebAssembly byt ```bash cd api/functions/image-grayscale/ -cargo build --release --target wasm32-wasi +cargo build --release --target wasm32-wasi ``` Copy the build artifacts to the `api` folder. @@ -75,9 +75,9 @@ const { spawn } = require('child_process'); const path = require('path'); module.exports = (req, res) => { - const wasmedge = spawn( - path.join(__dirname, 'wasmedge'), - [path.join(__dirname, 'grayscale.so')]); + const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ + path.join(__dirname, 'grayscale.so'), + ]); let d = []; wasmedge.stdout.on('data', (data) => { @@ -93,7 +93,7 @@ module.exports = (req, res) => { wasmedge.stdin.write(req.body); wasmedge.stdin.end(''); -} +}; ``` That's it. [Deploy the repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and you now have a Vercel Jamstack app with a high-performance Rust and WebAssembly based serverless backend. @@ -104,7 +104,7 @@ The [second demo](https://vercel-wasm-runtime.vercel.app/) application allows us It is in [the same GitHub repo](https://github.com/second-state/vercel-wasm-runtime) as the previous example but in the `tensorflow` branch. 
Note: when you [import this GitHub repo](https://vercel.com/docs/git#deploying-a-git-repository) on the Vercel website, it will create a [preview URL](https://vercel.com/docs/platform/deployments#preview) for each branch. The `tensorflow` branch would have its own deployment URL. -The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/vercel-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. +The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/vercel-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. 
```rust pub fn main() { @@ -168,7 +168,7 @@ module.exports = (req, res) => { const wasmedge = spawn( path.join(__dirname, 'wasmedge-tensorflow-lite'), [path.join(__dirname, 'classify.so')], - {env: {'LD_LIBRARY_PATH': __dirname}} + { env: { LD_LIBRARY_PATH: __dirname } }, ); let d = []; @@ -183,7 +183,7 @@ module.exports = (req, res) => { wasmedge.stdin.write(req.body); wasmedge.stdin.end(''); -} +}; ``` You can now [deploy your forked repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and have a web app for subject classification. diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/wasmedge/comparison.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/wasmedge/comparison.md index 23c93a001..20fc78cb4 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/start/wasmedge/comparison.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/wasmedge/comparison.md @@ -12,18 +12,18 @@ Check out our infographic [WebAssembly vs. Docker](https://wasmedge.org/wasm_doc We created a handy table for the comparison. 
-| | NaCl | Application runtimes (eg Node & Python) | Docker-like container | WebAssembly | -| --- | --- | --- | --- | --- | -| Performance | Great | Poor | OK | Great | -| Resource footprint | Great | Poor | Poor | Great | -| Isolation | Poor | OK | OK | Great | -| Safety | Poor | OK | OK | Great | -| Portability | Poor | Great | OK | Great | -| Security | Poor | OK | OK | Great | -| Language and framework choice | N/A | N/A | Great | OK | -| Ease of use | OK | Great | Great | OK | -| Manageability | Poor | Poor | Great | Great | +| | NaCl | Application runtimes (eg Node & Python) | Docker-like container | WebAssembly | +| --- | --- | --- | --- | --- | +| Performance | Great | Poor | OK | Great | +| Resource footprint | Great | Poor | Poor | Great | +| Isolation | Poor | OK | OK | Great | +| Safety | Poor | OK | OK | Great | +| Portability | Poor | Great | OK | Great | +| Security | Poor | OK | OK | Great | +| Language and framework choice | N/A | N/A | Great | OK | +| Ease of use | OK | Great | Great | OK | +| Manageability | Poor | Poor | Great | Great | ## What's the difference between WebAssembly and eBPF? -`eBPF` is the bytecode format for a Linux kernel space VM that is suitable for network or security related tasks. WebAssembly is the bytecode format for a user space VM that is suited for business applications. [See details here](https://medium.com/codex/ebpf-and-webassembly-whose-vm-reigns-supreme-c2861ce08f89). \ No newline at end of file +`eBPF` is the bytecode format for a Linux kernel space VM that is suitable for network or security related tasks. WebAssembly is the bytecode format for a user space VM that is suited for business applications. [See details here](https://medium.com/codex/ebpf-and-webassembly-whose-vm-reigns-supreme-c2861ce08f89). 
From c60f8e1559130e755d142168da858acf1ba28795 Mon Sep 17 00:00:00 2001 From: Adithya Krishna Date: Fri, 11 Aug 2023 10:45:01 +0530 Subject: [PATCH 03/11] Made Requested Changes Signed-off-by: Adithya Krishna --- docs/embed/use-case/mesh/_category_.json | 8 - docs/embed/use-case/mesh/dapr.md | 263 ------------- docs/embed/use-case/mesh/eventmesh.md | 10 - docs/embed/use-case/reactr.md | 365 ------------------ docs/embed/use-case/serverless/secondstate.md | 18 - .../embed/use-case/mesh/_category_.json | 8 - .../current/embed/use-case/mesh/dapr.md | 263 ------------- .../current/embed/use-case/mesh/eventmesh.md | 10 - .../current/embed/use-case/reactr.md | 365 ------------------ .../embed/use-case/serverless/secondstate.md | 18 - 10 files changed, 1328 deletions(-) delete mode 100644 docs/embed/use-case/mesh/_category_.json delete mode 100644 docs/embed/use-case/mesh/dapr.md delete mode 100644 docs/embed/use-case/mesh/eventmesh.md delete mode 100644 docs/embed/use-case/reactr.md delete mode 100644 docs/embed/use-case/serverless/secondstate.md delete mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/_category_.json delete mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md delete mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md delete mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md delete mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md diff --git a/docs/embed/use-case/mesh/_category_.json b/docs/embed/use-case/mesh/_category_.json deleted file mode 100644 index 75cd42031..000000000 --- a/docs/embed/use-case/mesh/_category_.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "label": "Service mesh and Runtimes", - "position": 8, - "link": { - "type": "generated-index", - "description": "WasmEdge could be a lightweight runtime for sidecar microservices and the API proxy as the Docker 
alternative." - } -} diff --git a/docs/embed/use-case/mesh/dapr.md b/docs/embed/use-case/mesh/dapr.md deleted file mode 100644 index 6feaf448b..000000000 --- a/docs/embed/use-case/mesh/dapr.md +++ /dev/null @@ -1,263 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Dapr - -In this article, I will demonstrate how to use WasmEdge as a sidecar application runtime for Dapr. There are two ways to do this: - -- **Standalone WasmEdge** is the **recommended approach** is to write a microservice using [Rust](../../../write_wasm/rust/networking-nonblocking.md) or [JavaScript](../../../write_wasm/js/networking.md), and run it in WasmEdge. The WasmEdge application serves web requests and communicates with the sidecar via sockets using the Dapr API. In this case, we can [run WasmEdge as a managed container in k8s](../../kubernetes/quickstart.md). -- Alternatively, Embedded WasmEdge is to create a simple microservice in Rust or Go to listen for web requests and communicate with the Dapr sidecar. It passes the request data to a WasmEdge runtime for processing. The business logic of the microservice is a WebAssembly function created and deployed by an application developer. - -> While the first approach (running the entire microservice in WasmEdge) is much preferred, we are still working on a fully fledged Dapr SDKs for WasmEdge. You can track their progress in GitHub issues -- [Rust](https://github.com/WasmEdge/WasmEdge/issues/1571) and [JavaScript](https://github.com/WasmEdge/WasmEdge/issues/1572). - -## Quick start - -First you need to install [Dapr](https://docs.dapr.io/getting-started/install-dapr-cli) and [WasmEdge](../../../quick_start/install.md). [Go](https://golang.org/doc/install) and [Rust](https://www.rust-lang.org/tools/install) are optional for the standalone WasmEdge approach. However, they are required for the demo app since it showcases both standalone and embedded WasmEdge approaches. - -Fork or clone the demo application from Github. 
You can use this repo as your own application template. - -```bash -git clone https://github.com/second-state/dapr-wasm -``` - -The demo has 4 Dapr sidecar applications. The [web-port](https://github.com/second-state/dapr-wasm/tree/main/web-port) project provides a public web service for a static HTML page. This is the application’s UI. From the static HTML page, the user can select a microservice to turn an input image into grayscale. All 3 microsoervices below perform the same function. They are just implemented using different approaches. - -- **Standalone WasmEdge approach:** The [image-api-wasi-socket-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-wasi-socket-rs) project provides a standalone WasmEdge sidecar microservice that takes the input image and returns the grayscale image. The microservice is written in Rust and compiled into WebAssembly bytecode to run in WasmEdge. -- Embedded WasmEdge approach #1: The [image-api-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) project provides a simple Rust-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. -- Embedded WasmEdge approach #2: The [image-api-go](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) project provides a simple Go-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. - -You can follow the instructions in the [README](https://github.com/second-state/dapr-wasm/blob/main/README.md) to start the sidecar services. Here are commands to build the WebAssembly functions and start the sidecar services. The first set of commands deploy the static web page service and the standalone WasmEdge service written in Rust. It forms a complete application to turn an input image into grayscale. 
- -```bash -# Build and start the static HTML web page service for the UI and router for sending the uploaded image to the grayscale microservice -cd web-port -go build -./run_web.sh -cd ../ - -# Build the standalone image grayscale web service for WasmEdge -cd image-api-wasi-socket-rs -cargo build --target wasm32-wasi -cd ../ - -# Run the microservice as a Dapr sidecar app -cd image-api-wasi-socket-rs -./run_api_wasi_socket_rs.sh -cd ../ -``` - -The second set of commands create the alternative microservices for the embedded WasmEdge function. - -```bash -# Build the grayscale WebAssembly functions, and deploy them to the sidecar projects -cd functions/grayscale -./build.sh -cd ../../ - -# Build and start the Rust-based microservice for embedding the grayscale WasmEdge function -cd image-api-rs -cargo build --release -./run_api_rs.sh -cd ../ - -# Build and start the Go-based microservice for embedding the grayscale WasmEdge function -cd image-api-go -go build -./run_api_go.sh -cd ../ -``` - -Finally, you should be able to see the web UI in your browser. - -## Recommended: The standalone WasmEdge microservice in Rust - -The [standalone WasmEdge microservice](https://github.com/second-state/dapr-wasm/blob/main/image-api-wasi-socket-rs/src/main.rs) starts a non-blocking TCP server inside WasmEdge. The TCP server passes incoming requests to `handle_client()`, which passes HTTP requests to `handle_http()`, which calls `grayscale()` to process the image data in the request. - -```rust -fn main() -> std::io::Result<()> { - let port = std::env::var("PORT").unwrap_or(9005.to_string()); - println!("new connection at {}", port); - let listener = TcpListener::bind(format!("127.0.0.1:{}", port))?; - loop { - let _ = handle_client(listener.accept()?.0); - } -} - -fn handle_client(mut stream: TcpStream) -> std::io::Result<()> { - ... ... -} - -fn handle_http(req: Request>) -> bytecodec::Result> { - ... ... 
-} - -fn grayscale(image: &[u8]) -> Vec { - let detected = image::guess_format(&image); - let mut buf = vec![]; - if detected.is_err() { - return buf; - } - - let image_format_detected = detected.unwrap(); - let img = image::load_from_memory(&image).unwrap(); - let filtered = img.grayscale(); - match image_format_detected { - ImageFormat::Gif => { - filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); - } - _ => { - filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); - } - }; - return buf; -} -``` - -> Work in progress: It will soon interact with the Dapr sidecar through the [WasmEdge Dapr SDK in Rust](https://github.com/WasmEdge/WasmEdge/issues/1571). - -Now, you can build the microservice. It is a simple matter of compiling from Rust to WebAssembly. - -```bash -cd image-api-wasi-socket-rs -cargo build --target wasm32-wasi -``` - -Deploy the WasmEdge microservice in Dapr as follows. - -```bash -dapr run --app-id image-api-wasi-socket-rs \ - --app-protocol http \ - --app-port 9005 \ - --dapr-http-port 3503 \ - --components-path ../config \ - --log-level debug \ - wasmedge ./target/wasm32-wasi/debug/image-api-wasi-socket-rs.wasm -``` - -## Alternative: The embedded WasmEdge microservices - -The embedded WasmEdge approach requires us to create a WebAssembly function for the business logic (image processing) first, and then embed it into simple Dapr microservices. - -### Rust function for image processing - -The [Rust function](https://github.com/second-state/dapr-wasm/blob/main/functions/grayscale/src/lib.rs) is simple. It uses the [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md) macro to makes it easy to call the function from a Go or Rust host embedding the WebAssembly function. It takes and returns base64 encoded image data for the web. 
- -```rust -#[wasmedge_bindgen] -pub fn grayscale(image_data: String) -> String { - let image_bytes = image_data.split(",").map(|x| x.parse::().unwrap()).collect::>(); - return grayscale::grayscale_internal(&image_bytes); -} -``` - -The Rust function that actually performs the task is as follows. - -```rust -pub fn grayscale_internal(image_data: &[u8]) -> String { - let image_format_detected: ImageFormat = image::guess_format(&image_data).unwrap(); - let img = image::load_from_memory(&image_data).unwrap(); - let filtered = img.grayscale(); - let mut buf = vec![]; - match image_format_detected { - ImageFormat::Gif => { - filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); - } - _ => { - filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); - } - }; - let mut base64_encoded = String::new(); - base64::encode_config_buf(&buf, base64::STANDARD, &mut base64_encoded); - return base64_encoded.to_string(); -} -``` - -### The Go host wrapper for microservice - -The [Go-based microservice](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) embeds the above imaging processing function in WasmEdge. The [microservice itself](https://github.com/second-state/dapr-wasm/blob/main/image-api-go/image_api.go) is a web server and utilizes the Dapr Go SDK. - -```go -func main() { - s := daprd.NewService(":9003") - - if err := s.AddServiceInvocationHandler("/api/image", imageHandlerWASI); err != nil { - log.Fatalf("error adding invocation handler: %v", err) - } - - if err := s.Start(); err != nil && err != http.ErrServerClosed { - log.Fatalf("error listening: %v", err) - } -} -``` - -The `imageHandlerWASI()` function [starts a WasmEdge instance](../../../sdk/go/function.md) and calls the image processing (grayscale) function in it via [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md). - -Build and deploy the Go microservice to Dapr as follows. 
- -```bash -cd image-api-go -go build -dapr run --app-id image-api-go \ - --app-protocol http \ - --app-port 9003 \ - --dapr-http-port 3501 \ - --log-level debug \ - --components-path ../config \ - ./image-api-go -``` - -### The Rust host wrapper for microservice - -The [Rust-based microservice](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) embeds the above imaging processing function in WasmEdge. The [microservice itself](https://github.com/second-state/dapr-wasm/blob/main/image-api-rs/src/main.rs) is a Tokio and Warp based web server. - -```rust -#[tokio::main] -pub async fn run_server(port: u16) { - pretty_env_logger::init(); - let home = warp::get().map(warp::reply); - - let image = warp::post() - .and(warp::path("api")) - .and(warp::path("image")) - .and(warp::body::bytes()) - .map(|bytes: bytes::Bytes| { - let v: Vec = bytes.iter().map(|&x| x).collect(); - let res = image_process_wasmedge_sys(&v); - let _encoded = base64::encode(&res); - Response::builder() - .header("content-type", "image/png") - .body(res) - }); - - let routes = home.or(image); - let routes = routes.with(warp::cors().allow_any_origin()); - - let log = warp::log("dapr_wasm"); - let routes = routes.with(log); - warp::serve(routes).run((Ipv4Addr::UNSPECIFIED, port)).await -} -``` - -The `image_process_wasmedge_sys()` function [starts a WasmEdge instance](../../../sdk/rust/sys_run_host_func.md) and calls the image processing (grayscale) function in it via [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md). - -Build and deploy the Rust microservice to Dapr as follows. - -```bash -cd image-api-rs -cargo build --release -dapr stop image-api-rs - -# Change this to your own path for WasmEdge -export LD_LIBRARY_PATH=/home/coder/.wasmedge/lib64/ - -dapr run --app-id image-api-rs \ - --app-protocol http \ - --app-port 9004 \ - --dapr-http-port 3502 \ - --components-path ../config \ - --log-level debug \ - ./target/release/image-api-rs -``` - -That's it! 
[Let us know](https://github.com/WasmEdge/WasmEdge/discussions) your cool Dapr microservices in WebAssembly! diff --git a/docs/embed/use-case/mesh/eventmesh.md b/docs/embed/use-case/mesh/eventmesh.md deleted file mode 100644 index 9e8dfaaa4..000000000 --- a/docs/embed/use-case/mesh/eventmesh.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -sidebar_position: 2 ---- - -# Apache Eventmesh - - -:::info -Coming Soon or you can [help out](https://github.com/WasmEdge/WasmEdge/issues/632) -::: diff --git a/docs/embed/use-case/reactr.md b/docs/embed/use-case/reactr.md deleted file mode 100644 index 90864c3e8..000000000 --- a/docs/embed/use-case/reactr.md +++ /dev/null @@ -1,365 +0,0 @@ ---- -sidebar_position: 7 ---- - -# Reactr - -[Reactr](https://github.com/suborbital/reactr) is a fast, performant function scheduling library written in Go. Reactr is designed to be flexible, with the ability to run embedded in your Go applications and first-class support for WebAssembly. Taking advantage of Go's superior concurrency capabilities, Reactr can manage and execute hundreds of WebAssembly runtime instances all at once, making a great framework for server-side applications. - -Reactr allows you to run WebAssembly functions in Go, so does the [WasmEdge Go SDK](../../../sdk/go.md). The unique feature of Reactr is that it provides a rich set of host functions in Go, which support access to networks and databases etc. Reactr then provides Rust (and Swift / AssemblyScript) APIs to call those host functions from within the WebAssembly function. - -In this article, we will show you how to use WasmEdge together with Reactr to take advantage of the best of both worlds. WasmEdge is the [fastest and most extensible WebAssembly runtime](../../../features.md). It is also the fastest in [Reactr's official test suite](https://github.com/suborbital/reactr/runs/4476074960?check_suite_focus=true). 
We will show you how to run Rust functions compiled to WebAssembly as well as JavaScript programs in WasmEdge and Reactr. - -> WasmEdge provides [advanced support for JavaScript](../../../write_wasm/js.md) including [mixing Rust with JavaScript](../../../write_wasm/js/rust.md) for improved performance. - -- [Hello world](#hello-world) -- [Database query](#database-query) -- [Embed JavaScript in Go](#embed-javascript-in-go) - -## Prerequisites - -You need have [Rust](https://www.rust-lang.org/tools/install), [Go](https://go.dev/doc/install), and [WasmEdge](../../../quick_start/install.md) installed on your system. The GCC compiler (installed via the `build-essential` package) is also needed for WasmEdge. - -```bash -sudo apt-get update -sudo apt-get -y upgrade -sudo apt install build-essential - -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -source $HOME/.cargo/env -rustup target add wasm32-wasi - -curl -OL https://golang.org/dl/go1.17.5.linux-amd64.tar.gz -sudo tar -C /usr/local -xvf go1.17.5.linux-amd64.tar.gz -export PATH=$PATH:/usr/local/go/bin - -wget -qO- https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash -source $HOME/.wasmedge/env -``` - -## Hello world - -A simple `hello world` example for Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/hello). - -### Hello world: Rust function compiled to WebAssembly - -Let's first create [a simple Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/hello-echo/src/lib.rs) to echo hello. The Rust function `HelloEcho::run()` is as follows. It will be exposed to the Go host application through Reactr. 
- -```rust -use suborbital::runnable::*; - -struct HelloEcho{} - -impl Runnable for HelloEcho { - fn run(&self, input: Vec) -> Result, RunErr> { - let in_string = String::from_utf8(input).unwrap(); - Ok(format!("hello {}", in_string).as_bytes().to_vec()) - } -} -``` - -Let's build the Rust function into a WebAssembly bytecode file. - -```bash -cd hello-echo -cargo build --target wasm32-wasi --release -cp target/wasm32-wasi/release/hello_echo.wasm .. -cd .. -``` - -### Hello world: Go host application - -Next, lets look into the [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/main.go) that executes the WebAssembly functions. The `runBundle()` function executes the `run()` function in the `Runnable` struct once. - -```go -func runBundle() { - r := rt.New() - doWasm := r.Register("hello-echo", rwasm.NewRunner("./hello_echo.wasm")) - - res, err := doWasm([]byte("wasmWorker!")).Then() - if err != nil { - fmt.Println(err) - return - } - - fmt.Println(string(res.([]byte))) -} -``` - -The `runGroup()` function executes the Rust-compiled WebAssembly `run()` function multiple times asynchronously in a group, and receives the results as they come in. - -```go -func runGroup() { - r := rt.New() - - doWasm := r.Register("hello-echo", rwasm.NewRunner("./hello_echo.wasm")) - - grp := rt.NewGroup() - for i := 0; i < 100000; i++ { - grp.Add(doWasm([]byte(fmt.Sprintf("world %d", i)))) - } - - if err := grp.Wait(); err != nil { - fmt.Println(err) - } -} -``` - -Finally, let's run the Go host application and see the results printed to the console. - -> You must use the `-tags wasmedge` flag to take advantage of the performance and extended WebAssembly APIs provided by WasmEdge. 
- -```bash -go mod tidy -go run -tags wasmedge main.go -``` - -## Database query - -In [this example](https://github.com/second-state/wasm-learning/tree/master/reactr/db), we will demonstrate how to use Reactr host functions and APIs to query a PostgreSQL database from your WebAssembly function. - -### Database query: Install and set up a PostgreSQL database - -We will start a PostgreSQL instance through Docker. - -```bash -docker pull postgres -docker run --name reactr-postgres -p 5432:5432 -e POSTGRES_PASSWORD=12345 -d postgres -``` - -Next, let's create a database and populate it with some sample data. - -```bash -$ docker run -it --rm --network host postgres psql -h 127.0.0.1 -U postgres -postgres=# CREATE DATABASE reactr; -postgres=# \c reactr; - -# Create a table: -postgres=# CREATE TABLE users ( - uuid varchar(100) CONSTRAINT firstkey PRIMARY KEY, - email varchar(50) NOT NULL, - created_at date, - state char(1), - identifier integer -); -``` - -Leave this running and start another terminal window to interact with this PostgreSQL server. - -### Database query: Rust function compiled to WebAssembly - -Let's create [a Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/db/rs-db/src/lib.rs) to access the PostgreSQL database. The Rust function `RsDbtest::run()` is as follows. It will be exposed to the Go host application through Reactr. It uses named queries such as `PGInsertUser` and `PGSelectUserWithUUID` to operate the database. Those queries are defined in the Go host application, and we will see them later. 
- -```rust -use suborbital::runnable::*; -use suborbital::db; -use suborbital::util; -use suborbital::db::query; -use suborbital::log; -use uuid::Uuid; - -struct RsDbtest{} - -impl Runnable for RsDbtest { - fn run(&self, _: Vec) -> Result, RunErr> { - let uuid = Uuid::new_v4().to_string(); - - let mut args: Vec = Vec::new(); - args.push(query::QueryArg::new("uuid", uuid.as_str())); - args.push(query::QueryArg::new("email", "connor@suborbital.dev")); - - match db::insert("PGInsertUser", args) { - Ok(_) => log::info("insert successful"), - Err(e) => { - return Err(RunErr::new(500, e.message.as_str())) - } - }; - - let mut args2: Vec = Vec::new(); - args2.push(query::QueryArg::new("uuid", uuid.as_str())); - - match db::update("PGUpdateUserWithUUID", args2.clone()) { - Ok(rows) => log::info(format!("update: {}", util::to_string(rows).as_str()).as_str()), - Err(e) => { - return Err(RunErr::new(500, e.message.as_str())) - } - } - - match db::select("PGSelectUserWithUUID", args2.clone()) { - Ok(result) => log::info(format!("select: {}", util::to_string(result).as_str()).as_str()), - Err(e) => { - return Err(RunErr::new(500, e.message.as_str())) - } - } - - match db::delete("PGDeleteUserWithUUID", args2.clone()) { - Ok(rows) => log::info(format!("delete: {}", util::to_string(rows).as_str()).as_str()), - Err(e) => { - return Err(RunErr::new(500, e.message.as_str())) - } - } - - ... ... - } -} -``` - -Let's build the Rust function into a WebAssembly bytecode file. - -```bash -cd rs-db -cargo build --target wasm32-wasi --release -cp target/wasm32-wasi/release/rs_db.wasm .. -cd .. -``` - -### Database query: Go host application - -The [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/db/main.go) first defines the SQL queries and gives each of them a name. We will then pass those queries to the Reactr runtime as a configuration. 
- -```go -func main() { - dbConnString, exists := os.LookupEnv("REACTR_DB_CONN_STRING") - if !exists { - fmt.Println("skipping as conn string env var not set") - return - } - - q1 := rcap.Query{ - Type: rcap.QueryTypeInsert, - Name: "PGInsertUser", - VarCount: 2, - Query: ` - INSERT INTO users (uuid, email, created_at, state, identifier) - VALUES ($1, $2, NOW(), 'A', 12345)`, - } - - q2 := rcap.Query{ - Type: rcap.QueryTypeSelect, - Name: "PGSelectUserWithUUID", - VarCount: 1, - Query: ` - SELECT * FROM users - WHERE uuid = $1`, - } - - q3 := rcap.Query{ - Type: rcap.QueryTypeUpdate, - Name: "PGUpdateUserWithUUID", - VarCount: 1, - Query: ` - UPDATE users SET state='B' WHERE uuid = $1`, - } - - q4 := rcap.Query{ - Type: rcap.QueryTypeDelete, - Name: "PGDeleteUserWithUUID", - VarCount: 1, - Query: ` - DELETE FROM users WHERE uuid = $1`, - } - - config := rcap.DefaultConfigWithDB(vlog.Default(), rcap.DBTypePostgres, dbConnString, []rcap.Query{q1, q2, q3, q4}) - - r, err := rt.NewWithConfig(config) - if err != nil { - fmt.Println(err) - return - } - - ... ... -} -``` - -Then, we can run the WebAssembly function from Reactr. - -```go -func main() { - ... ... - - doWasm := r.Register("rs-db", rwasm.NewRunner("./rs_db.wasm")) - - res, err := doWasm(nil).Then() - if err != nil { - fmt.Println(err) - return - } - - fmt.Println(string(res.([]byte))) -} -``` - -Finally, let's run the Go host application and see the results printed to the console. - -> You must use the `-tags wasmedge` flag to take advantage of the performance and extended WebAssembly APIs provided by WasmEdge. 
- -```bash -export REACTR_DB_CONN_STRING='postgresql://postgres:12345@127.0.0.1:5432/reactr' -go mod tidy -go run -tags wasmedge main.go -``` - -## Embed JavaScript in Go - -As we mentioned, a key feature of the WasmEdge Runtime is its advanced [JavaScript support](../../../write_wasm/js.md), which allows JavaScript programs to run in lightweight, high-performance, safe, multi-language, and [Kubernetes-managed WasmEdge containers](../../kubernetes.md). A simple example of embedded JavaScript function in Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs). - -### JavaScript example - -The [JavaScript example function](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/hello.js) is very simple. It just returns a string value. - -```javascript -let h = 'hello'; -let w = 'wasmedge'; -`${h} ${w}`; -``` - -### JavaScript example: Go host application - -The [Go host app](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/main.go) uses the Reactr API to run WasmEdge's standard JavaScript interpreter [rs_embed_js.wasm](https://github.com/second-state/wasm-learning/blob/master/reactr/quickjs/rs_embed_js.wasm). You can build your own version of JavaScript interpreter by modifying [this Rust project](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/rs-embed-js). - -> Learn more about how to embed [JavaScript code in Rust](https://github.com/second-state/wasmedge-quickjs/tree/main/examples/embed_js), and how to [use Rust to implement JavaScript APIs](../../../write_wasm/js/rust.md) in WasmEdge. - -The Go host application just need to start the job for `rs_embed_js.wasm` and pass the JavaScript content to it. The Go application can then capture and print the return value from JavaScript. 
- -```go -func main() { - r := rt.New() - doWasm := r.Register("hello-quickjs", rwasm.NewRunner("./rs_embed_js.wasm")) - - code, err := ioutil.ReadFile(os.Args[1]) - if err != nil { - fmt.Print(err) - } - res, err := doWasm(code).Then() - if err != nil { - fmt.Println(err) - return - } - - fmt.Println(string(res.([]byte))) -} -``` - -Run the Go host application as follows. - -```bash -$ cd quickjs -$ go mod tidy -$ go run -tags wasmedge main.go hello.js -String(JsString(hello wasmedge)) -``` - -The printed result shows the type information of the string in Rust and Go APIs. You can strip out this information by changing the Rust or Go applications. - -### JavaScript example: Feature examples - -WasmEdge supports many advanced JavaScript features. For the next step, you could try our [React SSR example](https://github.com/second-state/wasmedge-quickjs/tree/main/example_js/react_ssr) to generate an HTML UI from a Reactr function! You can just build the `dist/main.js` from the React SSR example, and copy it over to this example folder to see it in action! - -```bash -$ cd quickjs -# copy over the dist/main.js file from the react ssr example -$ go mod tidy -$ go run -tags wasmedge main.go main.js -
This is home
This is page
-UnDefined -``` diff --git a/docs/embed/use-case/serverless/secondstate.md b/docs/embed/use-case/serverless/secondstate.md deleted file mode 100644 index 5cff16996..000000000 --- a/docs/embed/use-case/serverless/secondstate.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -sidebar_position: 3 ---- - -# Second State Functions - -Second State Functions, powered by WasmEdge, supports the Rust language as a first class citizen. - -It could - -- [Handle text-based input and output](https://www.secondstate.io/articles/getting-started-with-function-as-a-service-in-rust/) -- [Use Binary data as function input and output](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) -- [Mix bytes and strings in function argument and return value](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) -- [Use webhooks as function input and output](https://www.secondstate.io/articles/internet-of-functions-webhooks/) -- [Access internet resources via a `http_proxy` API](https://www.secondstate.io/articles/internet-of-functions-http-proxy/) -- [Running TensorFlow models at native speed via the WasmEdge TensorFlow API](https://www.secondstate.io/articles/wasi-tensorflow/) - -Check out the [Second State Functions](https://www.secondstate.io/faas/) website for more tutorials. diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/_category_.json b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/_category_.json deleted file mode 100644 index 75cd42031..000000000 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/_category_.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "label": "Service mesh and Runtimes", - "position": 8, - "link": { - "type": "generated-index", - "description": "WasmEdge could be a lightweight runtime for sidecar microservices and the API proxy as the Docker alternative." 
- } -} diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md deleted file mode 100644 index 6feaf448b..000000000 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/dapr.md +++ /dev/null @@ -1,263 +0,0 @@ ---- -sidebar_position: 1 ---- - -# Dapr - -In this article, I will demonstrate how to use WasmEdge as a sidecar application runtime for Dapr. There are two ways to do this: - -- **Standalone WasmEdge** is the **recommended approach** is to write a microservice using [Rust](../../../write_wasm/rust/networking-nonblocking.md) or [JavaScript](../../../write_wasm/js/networking.md), and run it in WasmEdge. The WasmEdge application serves web requests and communicates with the sidecar via sockets using the Dapr API. In this case, we can [run WasmEdge as a managed container in k8s](../../kubernetes/quickstart.md). -- Alternatively, Embedded WasmEdge is to create a simple microservice in Rust or Go to listen for web requests and communicate with the Dapr sidecar. It passes the request data to a WasmEdge runtime for processing. The business logic of the microservice is a WebAssembly function created and deployed by an application developer. - -> While the first approach (running the entire microservice in WasmEdge) is much preferred, we are still working on a fully fledged Dapr SDKs for WasmEdge. You can track their progress in GitHub issues -- [Rust](https://github.com/WasmEdge/WasmEdge/issues/1571) and [JavaScript](https://github.com/WasmEdge/WasmEdge/issues/1572). - -## Quick start - -First you need to install [Dapr](https://docs.dapr.io/getting-started/install-dapr-cli) and [WasmEdge](../../../quick_start/install.md). [Go](https://golang.org/doc/install) and [Rust](https://www.rust-lang.org/tools/install) are optional for the standalone WasmEdge approach. 
However, they are required for the demo app since it showcases both standalone and embedded WasmEdge approaches. - -Fork or clone the demo application from Github. You can use this repo as your own application template. - -```bash -git clone https://github.com/second-state/dapr-wasm -``` - -The demo has 4 Dapr sidecar applications. The [web-port](https://github.com/second-state/dapr-wasm/tree/main/web-port) project provides a public web service for a static HTML page. This is the application’s UI. From the static HTML page, the user can select a microservice to turn an input image into grayscale. All 3 microservices below perform the same function. They are just implemented using different approaches. - -- **Standalone WasmEdge approach:** The [image-api-wasi-socket-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-wasi-socket-rs) project provides a standalone WasmEdge sidecar microservice that takes the input image and returns the grayscale image. The microservice is written in Rust and compiled into WebAssembly bytecode to run in WasmEdge. -- Embedded WasmEdge approach #1: The [image-api-rs](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) project provides a simple Rust-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. -- Embedded WasmEdge approach #2: The [image-api-go](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) project provides a simple Go-based microservice. It embeds a [WasmEdge function](https://github.com/second-state/dapr-wasm/tree/main/functions/grayscale) to turn an input image into a grayscale image. - -You can follow the instructions in the [README](https://github.com/second-state/dapr-wasm/blob/main/README.md) to start the sidecar services. Here are commands to build the WebAssembly functions and start the sidecar services.
The first set of commands deploy the static web page service and the standalone WasmEdge service written in Rust. It forms a complete application to turn an input image into grayscale. - -```bash -# Build and start the static HTML web page service for the UI and router for sending the uploaded image to the grayscale microservice -cd web-port -go build -./run_web.sh -cd ../ - -# Build the standalone image grayscale web service for WasmEdge -cd image-api-wasi-socket-rs -cargo build --target wasm32-wasi -cd ../ - -# Run the microservice as a Dapr sidecar app -cd image-api-wasi-socket-rs -./run_api_wasi_socket_rs.sh -cd ../ -``` - -The second set of commands create the alternative microservices for the embedded WasmEdge function. - -```bash -# Build the grayscale WebAssembly functions, and deploy them to the sidecar projects -cd functions/grayscale -./build.sh -cd ../../ - -# Build and start the Rust-based microservice for embedding the grayscale WasmEdge function -cd image-api-rs -cargo build --release -./run_api_rs.sh -cd ../ - -# Build and start the Go-based microservice for embedding the grayscale WasmEdge function -cd image-api-go -go build -./run_api_go.sh -cd ../ -``` - -Finally, you should be able to see the web UI in your browser. - -## Recommended: The standalone WasmEdge microservice in Rust - -The [standalone WasmEdge microservice](https://github.com/second-state/dapr-wasm/blob/main/image-api-wasi-socket-rs/src/main.rs) starts a non-blocking TCP server inside WasmEdge. The TCP server passes incoming requests to `handle_client()`, which passes HTTP requests to `handle_http()`, which calls `grayscale()` to process the image data in the request. 
- -```rust -fn main() -> std::io::Result<()> { - let port = std::env::var("PORT").unwrap_or(9005.to_string()); - println!("new connection at {}", port); - let listener = TcpListener::bind(format!("127.0.0.1:{}", port))?; - loop { - let _ = handle_client(listener.accept()?.0); - } -} - -fn handle_client(mut stream: TcpStream) -> std::io::Result<()> { - ... ... -} - -fn handle_http(req: Request>) -> bytecodec::Result> { - ... ... -} - -fn grayscale(image: &[u8]) -> Vec { - let detected = image::guess_format(&image); - let mut buf = vec![]; - if detected.is_err() { - return buf; - } - - let image_format_detected = detected.unwrap(); - let img = image::load_from_memory(&image).unwrap(); - let filtered = img.grayscale(); - match image_format_detected { - ImageFormat::Gif => { - filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); - } - _ => { - filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); - } - }; - return buf; -} -``` - -> Work in progress: It will soon interact with the Dapr sidecar through the [WasmEdge Dapr SDK in Rust](https://github.com/WasmEdge/WasmEdge/issues/1571). - -Now, you can build the microservice. It is a simple matter of compiling from Rust to WebAssembly. - -```bash -cd image-api-wasi-socket-rs -cargo build --target wasm32-wasi -``` - -Deploy the WasmEdge microservice in Dapr as follows. - -```bash -dapr run --app-id image-api-wasi-socket-rs \ - --app-protocol http \ - --app-port 9005 \ - --dapr-http-port 3503 \ - --components-path ../config \ - --log-level debug \ - wasmedge ./target/wasm32-wasi/debug/image-api-wasi-socket-rs.wasm -``` - -## Alternative: The embedded WasmEdge microservices - -The embedded WasmEdge approach requires us to create a WebAssembly function for the business logic (image processing) first, and then embed it into simple Dapr microservices. - -### Rust function for image processing - -The [Rust function](https://github.com/second-state/dapr-wasm/blob/main/functions/grayscale/src/lib.rs) is simple. 
It uses the [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md) macro to make it easy to call the function from a Go or Rust host embedding the WebAssembly function. It takes and returns base64 encoded image data for the web. - -```rust -#[wasmedge_bindgen] -pub fn grayscale(image_data: String) -> String { - let image_bytes = image_data.split(",").map(|x| x.parse::().unwrap()).collect::>(); - return grayscale::grayscale_internal(&image_bytes); -} -``` - -The Rust function that actually performs the task is as follows. - -```rust -pub fn grayscale_internal(image_data: &[u8]) -> String { - let image_format_detected: ImageFormat = image::guess_format(&image_data).unwrap(); - let img = image::load_from_memory(&image_data).unwrap(); - let filtered = img.grayscale(); - let mut buf = vec![]; - match image_format_detected { - ImageFormat::Gif => { - filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); - } - _ => { - filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); - } - }; - let mut base64_encoded = String::new(); - base64::encode_config_buf(&buf, base64::STANDARD, &mut base64_encoded); - return base64_encoded.to_string(); -} -``` - -### The Go host wrapper for microservice - -The [Go-based microservice](https://github.com/second-state/dapr-wasm/tree/main/image-api-go) embeds the above image processing function in WasmEdge. The [microservice itself](https://github.com/second-state/dapr-wasm/blob/main/image-api-go/image_api.go) is a web server and utilizes the Dapr Go SDK.
- -```go -func main() { - s := daprd.NewService(":9003") - - if err := s.AddServiceInvocationHandler("/api/image", imageHandlerWASI); err != nil { - log.Fatalf("error adding invocation handler: %v", err) - } - - if err := s.Start(); err != nil && err != http.ErrServerClosed { - log.Fatalf("error listening: %v", err) - } -} -``` - -The `imageHandlerWASI()` function [starts a WasmEdge instance](../../../sdk/go/function.md) and calls the image processing (grayscale) function in it via [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md). - -Build and deploy the Go microservice to Dapr as follows. - -```bash -cd image-api-go -go build -dapr run --app-id image-api-go \ - --app-protocol http \ - --app-port 9003 \ - --dapr-http-port 3501 \ - --log-level debug \ - --components-path ../config \ - ./image-api-go -``` - -### The Rust host wrapper for microservice - -The [Rust-based microservice](https://github.com/second-state/dapr-wasm/tree/main/image-api-rs) embeds the above imaging processing function in WasmEdge. The [microservice itself](https://github.com/second-state/dapr-wasm/blob/main/image-api-rs/src/main.rs) is a Tokio and Warp based web server. 
- -```rust -#[tokio::main] -pub async fn run_server(port: u16) { - pretty_env_logger::init(); - let home = warp::get().map(warp::reply); - - let image = warp::post() - .and(warp::path("api")) - .and(warp::path("image")) - .and(warp::body::bytes()) - .map(|bytes: bytes::Bytes| { - let v: Vec = bytes.iter().map(|&x| x).collect(); - let res = image_process_wasmedge_sys(&v); - let _encoded = base64::encode(&res); - Response::builder() - .header("content-type", "image/png") - .body(res) - }); - - let routes = home.or(image); - let routes = routes.with(warp::cors().allow_any_origin()); - - let log = warp::log("dapr_wasm"); - let routes = routes.with(log); - warp::serve(routes).run((Ipv4Addr::UNSPECIFIED, port)).await -} -``` - -The `image_process_wasmedge_sys()` function [starts a WasmEdge instance](../../../sdk/rust/sys_run_host_func.md) and calls the image processing (grayscale) function in it via [wasmedge_bindgen](../../../write_wasm/rust/bindgen.md). - -Build and deploy the Rust microservice to Dapr as follows. - -```bash -cd image-api-rs -cargo build --release -dapr stop image-api-rs - -# Change this to your own path for WasmEdge -export LD_LIBRARY_PATH=/home/coder/.wasmedge/lib64/ - -dapr run --app-id image-api-rs \ - --app-protocol http \ - --app-port 9004 \ - --dapr-http-port 3502 \ - --components-path ../config \ - --log-level debug \ - ./target/release/image-api-rs -``` - -That's it! [Let us know](https://github.com/WasmEdge/WasmEdge/discussions) your cool Dapr microservices in WebAssembly! 
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md deleted file mode 100644 index 9e8dfaaa4..000000000 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/mesh/eventmesh.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -sidebar_position: 2 ---- - -# Apache Eventmesh - - -:::info -Coming Soon or you can [help out](https://github.com/WasmEdge/WasmEdge/issues/632) -::: diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md deleted file mode 100644 index 90864c3e8..000000000 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/reactr.md +++ /dev/null @@ -1,365 +0,0 @@ ---- -sidebar_position: 7 ---- - -# Reactr - -[Reactr](https://github.com/suborbital/reactr) is a fast, performant function scheduling library written in Go. Reactr is designed to be flexible, with the ability to run embedded in your Go applications and first-class support for WebAssembly. Taking advantage of Go's superior concurrency capabilities, Reactr can manage and execute hundreds of WebAssembly runtime instances all at once, making a great framework for server-side applications. - -Reactr allows you to run WebAssembly functions in Go, so does the [WasmEdge Go SDK](../../../sdk/go.md). The unique feature of Reactr is that it provides a rich set of host functions in Go, which support access to networks and databases etc. Reactr then provides Rust (and Swift / AssemblyScript) APIs to call those host functions from within the WebAssembly function. - -In this article, we will show you how to use WasmEdge together with Reactr to take advantage of the best of both worlds. WasmEdge is the [fastest and most extensible WebAssembly runtime](../../../features.md). 
It is also the fastest in [Reactr's official test suite](https://github.com/suborbital/reactr/runs/4476074960?check_suite_focus=true). We will show you how to run Rust functions compiled to WebAssembly as well as JavaScript programs in WasmEdge and Reactr. - -> WasmEdge provides [advanced support for JavaScript](../../../write_wasm/js.md) including [mixing Rust with JavaScript](../../../write_wasm/js/rust.md) for improved performance. - -- [Hello world](#hello-world) -- [Database query](#database-query) -- [Embed JavaScript in Go](#embed-javascript-in-go) - -## Prerequisites - -You need to have [Rust](https://www.rust-lang.org/tools/install), [Go](https://go.dev/doc/install), and [WasmEdge](../../../quick_start/install.md) installed on your system. The GCC compiler (installed via the `build-essential` package) is also needed for WasmEdge. - -```bash -sudo apt-get update -sudo apt-get -y upgrade -sudo apt install build-essential - -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -source $HOME/.cargo/env -rustup target add wasm32-wasi - -curl -OL https://golang.org/dl/go1.17.5.linux-amd64.tar.gz -sudo tar -C /usr/local -xvf go1.17.5.linux-amd64.tar.gz -export PATH=$PATH:/usr/local/go/bin - -wget -qO- https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash -source $HOME/.wasmedge/env -``` - -## Hello world - -A simple `hello world` example for Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/hello). - -### Hello world: Rust function compiled to WebAssembly - -Let's first create [a simple Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/hello-echo/src/lib.rs) to echo hello. The Rust function `HelloEcho::run()` is as follows. It will be exposed to the Go host application through Reactr.
- -```rust -use suborbital::runnable::*; - -struct HelloEcho{} - -impl Runnable for HelloEcho { - fn run(&self, input: Vec) -> Result, RunErr> { - let in_string = String::from_utf8(input).unwrap(); - Ok(format!("hello {}", in_string).as_bytes().to_vec()) - } -} -``` - -Let's build the Rust function into a WebAssembly bytecode file. - -```bash -cd hello-echo -cargo build --target wasm32-wasi --release -cp target/wasm32-wasi/release/hello_echo.wasm .. -cd .. -``` - -### Hello world: Go host application - -Next, lets look into the [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/hello/main.go) that executes the WebAssembly functions. The `runBundle()` function executes the `run()` function in the `Runnable` struct once. - -```go -func runBundle() { - r := rt.New() - doWasm := r.Register("hello-echo", rwasm.NewRunner("./hello_echo.wasm")) - - res, err := doWasm([]byte("wasmWorker!")).Then() - if err != nil { - fmt.Println(err) - return - } - - fmt.Println(string(res.([]byte))) -} -``` - -The `runGroup()` function executes the Rust-compiled WebAssembly `run()` function multiple times asynchronously in a group, and receives the results as they come in. - -```go -func runGroup() { - r := rt.New() - - doWasm := r.Register("hello-echo", rwasm.NewRunner("./hello_echo.wasm")) - - grp := rt.NewGroup() - for i := 0; i < 100000; i++ { - grp.Add(doWasm([]byte(fmt.Sprintf("world %d", i)))) - } - - if err := grp.Wait(); err != nil { - fmt.Println(err) - } -} -``` - -Finally, let's run the Go host application and see the results printed to the console. - -> You must use the `-tags wasmedge` flag to take advantage of the performance and extended WebAssembly APIs provided by WasmEdge. 
- -```bash -go mod tidy -go run -tags wasmedge main.go -``` - -## Database query - -In [this example](https://github.com/second-state/wasm-learning/tree/master/reactr/db), we will demonstrate how to use Reactr host functions and APIs to query a PostgreSQL database from your WebAssembly function. - -### Database query: Install and set up a PostgreSQL database - -We will start a PostgreSQL instance through Docker. - -```bash -docker pull postgres -docker run --name reactr-postgres -p 5432:5432 -e POSTGRES_PASSWORD=12345 -d postgres -``` - -Next, let's create a database and populate it with some sample data. - -```bash -$ docker run -it --rm --network host postgres psql -h 127.0.0.1 -U postgres -postgres=# CREATE DATABASE reactr; -postgres=# \c reactr; - -# Create a table: -postgres=# CREATE TABLE users ( - uuid varchar(100) CONSTRAINT firstkey PRIMARY KEY, - email varchar(50) NOT NULL, - created_at date, - state char(1), - identifier integer -); -``` - -Leave this running and start another terminal window to interact with this PostgreSQL server. - -### Database query: Rust function compiled to WebAssembly - -Let's create [a Rust function](https://github.com/second-state/wasm-learning/blob/master/reactr/db/rs-db/src/lib.rs) to access the PostgreSQL database. The Rust function `RsDbtest::run()` is as follows. It will be exposed to the Go host application through Reactr. It uses named queries such as `PGInsertUser` and `PGSelectUserWithUUID` to operate the database. Those queries are defined in the Go host application, and we will see them later. 
- -```rust -use suborbital::runnable::*; -use suborbital::db; -use suborbital::util; -use suborbital::db::query; -use suborbital::log; -use uuid::Uuid; - -struct RsDbtest{} - -impl Runnable for RsDbtest { - fn run(&self, _: Vec) -> Result, RunErr> { - let uuid = Uuid::new_v4().to_string(); - - let mut args: Vec = Vec::new(); - args.push(query::QueryArg::new("uuid", uuid.as_str())); - args.push(query::QueryArg::new("email", "connor@suborbital.dev")); - - match db::insert("PGInsertUser", args) { - Ok(_) => log::info("insert successful"), - Err(e) => { - return Err(RunErr::new(500, e.message.as_str())) - } - }; - - let mut args2: Vec = Vec::new(); - args2.push(query::QueryArg::new("uuid", uuid.as_str())); - - match db::update("PGUpdateUserWithUUID", args2.clone()) { - Ok(rows) => log::info(format!("update: {}", util::to_string(rows).as_str()).as_str()), - Err(e) => { - return Err(RunErr::new(500, e.message.as_str())) - } - } - - match db::select("PGSelectUserWithUUID", args2.clone()) { - Ok(result) => log::info(format!("select: {}", util::to_string(result).as_str()).as_str()), - Err(e) => { - return Err(RunErr::new(500, e.message.as_str())) - } - } - - match db::delete("PGDeleteUserWithUUID", args2.clone()) { - Ok(rows) => log::info(format!("delete: {}", util::to_string(rows).as_str()).as_str()), - Err(e) => { - return Err(RunErr::new(500, e.message.as_str())) - } - } - - ... ... - } -} -``` - -Let's build the Rust function into a WebAssembly bytecode file. - -```bash -cd rs-db -cargo build --target wasm32-wasi --release -cp target/wasm32-wasi/release/rs_db.wasm .. -cd .. -``` - -### Database query: Go host application - -The [Go host app](https://github.com/second-state/wasm-learning/blob/master/reactr/db/main.go) first defines the SQL queries and gives each of them a name. We will then pass those queries to the Reactr runtime as a configuration. 
- -```go -func main() { - dbConnString, exists := os.LookupEnv("REACTR_DB_CONN_STRING") - if !exists { - fmt.Println("skipping as conn string env var not set") - return - } - - q1 := rcap.Query{ - Type: rcap.QueryTypeInsert, - Name: "PGInsertUser", - VarCount: 2, - Query: ` - INSERT INTO users (uuid, email, created_at, state, identifier) - VALUES ($1, $2, NOW(), 'A', 12345)`, - } - - q2 := rcap.Query{ - Type: rcap.QueryTypeSelect, - Name: "PGSelectUserWithUUID", - VarCount: 1, - Query: ` - SELECT * FROM users - WHERE uuid = $1`, - } - - q3 := rcap.Query{ - Type: rcap.QueryTypeUpdate, - Name: "PGUpdateUserWithUUID", - VarCount: 1, - Query: ` - UPDATE users SET state='B' WHERE uuid = $1`, - } - - q4 := rcap.Query{ - Type: rcap.QueryTypeDelete, - Name: "PGDeleteUserWithUUID", - VarCount: 1, - Query: ` - DELETE FROM users WHERE uuid = $1`, - } - - config := rcap.DefaultConfigWithDB(vlog.Default(), rcap.DBTypePostgres, dbConnString, []rcap.Query{q1, q2, q3, q4}) - - r, err := rt.NewWithConfig(config) - if err != nil { - fmt.Println(err) - return - } - - ... ... -} -``` - -Then, we can run the WebAssembly function from Reactr. - -```go -func main() { - ... ... - - doWasm := r.Register("rs-db", rwasm.NewRunner("./rs_db.wasm")) - - res, err := doWasm(nil).Then() - if err != nil { - fmt.Println(err) - return - } - - fmt.Println(string(res.([]byte))) -} -``` - -Finally, let's run the Go host application and see the results printed to the console. - -> You must use the `-tags wasmedge` flag to take advantage of the performance and extended WebAssembly APIs provided by WasmEdge. 
- -```bash -export REACTR_DB_CONN_STRING='postgresql://postgres:12345@127.0.0.1:5432/reactr' -go mod tidy -go run -tags wasmedge main.go -``` - -## Embed JavaScript in Go - -As we mentioned, a key feature of the WasmEdge Runtime is its advanced [JavaScript support](../../../write_wasm/js.md), which allows JavaScript programs to run in lightweight, high-performance, safe, multi-language, and [Kubernetes-managed WasmEdge containers](../../kubernetes.md). A simple example of embedded JavaScript function in Reactr is [available here](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs). - -### JavaScript example - -The [JavaScript example function](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/hello.js) is very simple. It just returns a string value. - -```javascript -let h = 'hello'; -let w = 'wasmedge'; -`${h} ${w}`; -``` - -### JavaScript example: Go host application - -The [Go host app](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/main.go) uses the Reactr API to run WasmEdge's standard JavaScript interpreter [rs_embed_js.wasm](https://github.com/second-state/wasm-learning/blob/master/reactr/quickjs/rs_embed_js.wasm). You can build your own version of JavaScript interpreter by modifying [this Rust project](https://github.com/second-state/wasm-learning/tree/master/reactr/quickjs/rs-embed-js). - -> Learn more about how to embed [JavaScript code in Rust](https://github.com/second-state/wasmedge-quickjs/tree/main/examples/embed_js), and how to [use Rust to implement JavaScript APIs](../../../write_wasm/js/rust.md) in WasmEdge. - -The Go host application just need to start the job for `rs_embed_js.wasm` and pass the JavaScript content to it. The Go application can then capture and print the return value from JavaScript. 
- -```go -func main() { - r := rt.New() - doWasm := r.Register("hello-quickjs", rwasm.NewRunner("./rs_embed_js.wasm")) - - code, err := ioutil.ReadFile(os.Args[1]) - if err != nil { - fmt.Print(err) - } - res, err := doWasm(code).Then() - if err != nil { - fmt.Println(err) - return - } - - fmt.Println(string(res.([]byte))) -} -``` - -Run the Go host application as follows. - -```bash -$ cd quickjs -$ go mod tidy -$ go run -tags wasmedge main.go hello.js -String(JsString(hello wasmedge)) -``` - -The printed result shows the type information of the string in Rust and Go APIs. You can strip out this information by changing the Rust or Go applications. - -### JavaScript example: Feature examples - -WasmEdge supports many advanced JavaScript features. For the next step, you could try our [React SSR example](https://github.com/second-state/wasmedge-quickjs/tree/main/example_js/react_ssr) to generate an HTML UI from a Reactr function! You can just build the `dist/main.js` from the React SSR example, and copy it over to this example folder to see it in action! - -```bash -$ cd quickjs -# copy over the dist/main.js file from the react ssr example -$ go mod tidy -$ go run -tags wasmedge main.go main.js -
This is home
This is page
-UnDefined -``` diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md deleted file mode 100644 index 5cff16996..000000000 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/secondstate.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -sidebar_position: 3 ---- - -# Second State Functions - -Second State Functions, powered by WasmEdge, supports the Rust language as a first class citizen. - -It could - -- [Handle text-based input and output](https://www.secondstate.io/articles/getting-started-with-function-as-a-service-in-rust/) -- [Use Binary data as function input and output](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) -- [Mix bytes and strings in function argument and return value](https://www.secondstate.io/articles/use-binary-data-as-function-input-and-output/) -- [Use webhooks as function input and output](https://www.secondstate.io/articles/internet-of-functions-webhooks/) -- [Access internet resources via a `http_proxy` API](https://www.secondstate.io/articles/internet-of-functions-http-proxy/) -- [Running TensorFlow models at native speed via the WasmEdge TensorFlow API](https://www.secondstate.io/articles/wasi-tensorflow/) - -Check out the [Second State Functions](https://www.secondstate.io/faas/) website for more tutorials. 
From 8e429cbfc879326db6cd66e2e32d8dc586b56c51 Mon Sep 17 00:00:00 2001 From: Adithya Krishna Date: Fri, 11 Aug 2023 11:04:09 +0530 Subject: [PATCH 04/11] Fixed Broken Links Signed-off-by: Adithya Krishna --- docs/embed/use-case/web-app.md | 2 +- .../current/embed/use-case/web-app.md | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/embed/use-case/web-app.md b/docs/embed/use-case/web-app.md index 05036703a..ba737d89d 100644 --- a/docs/embed/use-case/web-app.md +++ b/docs/embed/use-case/web-app.md @@ -98,4 +98,4 @@ $ sudo buildah build --annotation "module.wasm.image/variant=compat-smart" -t wa $ sudo buildah push --authfile ~/.docker/config.json wasm-wasi-example docker://docker.io/wasmedge/example-wasi:latest ``` -That's it! Now you can try to run it in [CRI-O](../cri/crio.md#run-a-simple-webassembly-app) or [Kubernetes](../kubernetes/kubernetes-crio.md#a-simple-webassembly-app)! +That's it! Now you can try to run it in [CRI-O](../../develop/deploy/cri-runtime/crio-crun.md) or [Kubernetes](../../develop/deploy/kubernetes/kubernetes-cri-o.md)! diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md index 05036703a..333ba9d6a 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md @@ -98,4 +98,5 @@ $ sudo buildah build --annotation "module.wasm.image/variant=compat-smart" -t wa $ sudo buildah push --authfile ~/.docker/config.json wasm-wasi-example docker://docker.io/wasmedge/example-wasi:latest ``` -That's it! Now you can try to run it in [CRI-O](../cri/crio.md#run-a-simple-webassembly-app) or [Kubernetes](../kubernetes/kubernetes-crio.md#a-simple-webassembly-app)! +That's it! Now you can try to run it in [CRI-O](../../develop/deploy/cri-runtime/crio-crun.md) or [Kubernetes](../../develop/deploy/kubernetes/kubernetes-cri-o.md)! 
+ From bacf9d6cb83307e86285bf351e676286be30da45 Mon Sep 17 00:00:00 2001 From: Adithya Krishna Date: Fri, 11 Aug 2023 11:12:02 +0530 Subject: [PATCH 05/11] Fixed Linting Issues Signed-off-by: Adithya Krishna --- .../current/embed/use-case/web-app.md | 1 - 1 file changed, 1 deletion(-) diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md index 333ba9d6a..ba737d89d 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md @@ -99,4 +99,3 @@ $ sudo buildah push --authfile ~/.docker/config.json wasm-wasi-example docker:// ``` That's it! Now you can try to run it in [CRI-O](../../develop/deploy/cri-runtime/crio-crun.md) or [Kubernetes](../../develop/deploy/kubernetes/kubernetes-cri-o.md)! - From debacb26db42e47663a5a5f20a5af639a2d4566e Mon Sep 17 00:00:00 2001 From: Adithya Krishna Date: Fri, 25 Aug 2023 12:42:21 +0530 Subject: [PATCH 06/11] feat: added use-cases Signed-off-by: Adithya Krishna --- docs/start/usage/_category_.json | 8 +++++++ docs/start/usage/use-cases.md | 21 +++++++++++++++++++ .../current/start/usage/_category_.json | 8 +++++++ .../current/start/usage/use-cases.md | 21 +++++++++++++++++++ 4 files changed, 58 insertions(+) create mode 100644 docs/start/usage/_category_.json create mode 100644 docs/start/usage/use-cases.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/start/usage/_category_.json create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md diff --git a/docs/start/usage/_category_.json b/docs/start/usage/_category_.json new file mode 100644 index 000000000..6fd885429 --- /dev/null +++ b/docs/start/usage/_category_.json @@ -0,0 +1,8 @@ +{ + "label": "WasmEdge Use-cases", + "position": 5, + "link": { + "type": "generated-index", + "description": "In this chapter, we will discuss 
use-cases of WasmEdge" + } +} diff --git a/docs/start/usage/use-cases.md b/docs/start/usage/use-cases.md new file mode 100644 index 000000000..d9a6f3556 --- /dev/null +++ b/docs/start/usage/use-cases.md @@ -0,0 +1,21 @@ +--- +sidebar_position: 1 +--- + +Featuring AOT compiler optimization, WasmEdge is one of the fastest WebAssembly runtimes on the market today. Therefore WasmEdge is widely used in edge computing, automotive, Jamstack, serverless, SaaS, service mesh, and even blockchain applications. + +- Modern web apps feature rich UIs that are rendered in the browser and/or on the edge cloud. WasmEdge works with popular web UI frameworks, such as React, Vue, Yew, and Percy, to support isomorphic [server-side rendering (SSR)](../../embed/use-case/ssr-modern-ui.md) functions on edge servers. It could also support server-side rendering of Unity3D animations and AI-generated interactive videos for web applications on the edge cloud. + +- WasmEdge provides a lightweight, secure and high-performance runtime for microservices. It is fully compatible with application service frameworks such as Dapr, and service orchestrators like Kubernetes. WasmEdge microservices can run on edge servers, and have access to distributed cache, to support both stateless and stateful business logic functions for modern web apps. Also related: Serverless function-as-a-service in public clouds. + +- [Serverless SaaS (Software-as-a-Service)](../../embed/use-case/serverless-saas.md) functions enables users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. 
+ +- [Smart device apps](../../embed/use-case/wasm-smart-devices.md) could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e, the GPU for AI inference). The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, and developer productivity. WasmEdge runs on Android, OpenHarmony, and seL4 RTOS devices. + +- WasmEdge could support high performance DSLs (Domain Specific Languages) or act as a cloud-native JavaScript runtime by embedding a JS execution engine or interpreter. + +- Developers can leverage container tools such as [Kubernetes](../../develop/deploy/kubernetes/kubernetes-containerd-crun.md), Docker and CRI-O to deploy, manage, and run lightweight WebAssembly applications. + +- WasmEdge applications can be plugged into existing application frameworks or platforms. + +If you have any great ideas on WasmEdge, don't hesitate to open a GitHub issue to discuss together. 
\ No newline at end of file diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/_category_.json b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/_category_.json new file mode 100644 index 000000000..6fd885429 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/_category_.json @@ -0,0 +1,8 @@ +{ + "label": "WasmEdge Use-cases", + "position": 5, + "link": { + "type": "generated-index", + "description": "In this chapter, we will discuss use-cases of WasmEdge" + } +} diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md new file mode 100644 index 000000000..d9a6f3556 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md @@ -0,0 +1,21 @@ +--- +sidebar_position: 1 +--- + +Featuring AOT compiler optimization, WasmEdge is one of the fastest WebAssembly runtimes on the market today. Therefore WasmEdge is widely used in edge computing, automotive, Jamstack, serverless, SaaS, service mesh, and even blockchain applications. + +- Modern web apps feature rich UIs that are rendered in the browser and/or on the edge cloud. WasmEdge works with popular web UI frameworks, such as React, Vue, Yew, and Percy, to support isomorphic [server-side rendering (SSR)](../../embed/use-case/ssr-modern-ui.md) functions on edge servers. It could also support server-side rendering of Unity3D animations and AI-generated interactive videos for web applications on the edge cloud. + +- WasmEdge provides a lightweight, secure and high-performance runtime for microservices. It is fully compatible with application service frameworks such as Dapr, and service orchestrators like Kubernetes. WasmEdge microservices can run on edge servers, and have access to distributed cache, to support both stateless and stateful business logic functions for modern web apps. 
Also related: Serverless function-as-a-service in public clouds. + +- [Serverless SaaS (Software-as-a-Service)](../../embed/use-case/serverless-saas.md) functions enables users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. + +- [Smart device apps](../../embed/use-case/wasm-smart-devices.md) could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e, the GPU for AI inference). The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, and developer productivity. WasmEdge runs on Android, OpenHarmony, and seL4 RTOS devices. + +- WasmEdge could support high performance DSLs (Domain Specific Languages) or act as a cloud-native JavaScript runtime by embedding a JS execution engine or interpreter. + +- Developers can leverage container tools such as [Kubernetes](../../develop/deploy/kubernetes/kubernetes-containerd-crun.md), Docker and CRI-O to deploy, manage, and run lightweight WebAssembly applications. + +- WasmEdge applications can be plugged into existing application frameworks or platforms. + +If you have any great ideas on WasmEdge, don't hesitate to open a GitHub issue to discuss together. 
\ No newline at end of file From 3e0a63109df98c48fe7d7c90d66732f1f683913c Mon Sep 17 00:00:00 2001 From: Adithya Krishna Date: Fri, 25 Aug 2023 12:50:08 +0530 Subject: [PATCH 07/11] chore: reordered contents Signed-off-by: Adithya Krishna --- .../start/usage}/serverless/_category_.json | 2 +- .../start/usage}/serverless/aws.md | 0 .../start/usage}/serverless/netlify.md | 0 .../start/usage}/serverless/tencent.md | 0 .../start/usage}/serverless/vercel.md | 0 docs/start/usage/use-cases.md | 4 +- .../start/usage}/wasm-smart-devices.md | 0 .../use-case => docs/start/usage}/web-app.md | 0 .../start/usage/serverless/_category_.json | 8 + .../current/start/usage/serverless/aws.md | 272 ++++++++++++++++++ .../current/start/usage/serverless/netlify.md | 189 ++++++++++++ .../current/start/usage/serverless/tencent.md | 11 + .../current/start/usage/serverless/vercel.md | 191 ++++++++++++ .../current/start/usage/use-cases.md | 4 +- .../current/start/usage/wasm-smart-devices.md | 14 + .../current/start/usage/web-app.md | 101 +++++++ 16 files changed, 791 insertions(+), 5 deletions(-) rename {i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case => docs/start/usage}/serverless/_category_.json (85%) rename {i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case => docs/start/usage}/serverless/aws.md (100%) rename {i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case => docs/start/usage}/serverless/netlify.md (100%) rename {i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case => docs/start/usage}/serverless/tencent.md (100%) rename {i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case => docs/start/usage}/serverless/vercel.md (100%) rename {i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case => docs/start/usage}/wasm-smart-devices.md (100%) rename {i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case => docs/start/usage}/web-app.md (100%) create mode 100644 
i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/_category_.json create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/aws.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/netlify.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/tencent.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/vercel.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/start/usage/wasm-smart-devices.md create mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/start/usage/web-app.md diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/_category_.json b/docs/start/usage/serverless/_category_.json similarity index 85% rename from i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/_category_.json rename to docs/start/usage/serverless/_category_.json index 53e7dfdd2..075ab1a18 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/_category_.json +++ b/docs/start/usage/serverless/_category_.json @@ -1,5 +1,5 @@ { - "label": "Serviceless Platforms", + "label": "Serverless Platforms", "position": 9, "link": { "type": "generated-index", diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/aws.md b/docs/start/usage/serverless/aws.md similarity index 100% rename from i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/aws.md rename to docs/start/usage/serverless/aws.md diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/netlify.md b/docs/start/usage/serverless/netlify.md similarity index 100% rename from i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/netlify.md rename to docs/start/usage/serverless/netlify.md diff --git 
a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/tencent.md b/docs/start/usage/serverless/tencent.md similarity index 100% rename from i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/tencent.md rename to docs/start/usage/serverless/tencent.md diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/vercel.md b/docs/start/usage/serverless/vercel.md similarity index 100% rename from i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/serverless/vercel.md rename to docs/start/usage/serverless/vercel.md diff --git a/docs/start/usage/use-cases.md b/docs/start/usage/use-cases.md index d9a6f3556..3cd2b5683 100644 --- a/docs/start/usage/use-cases.md +++ b/docs/start/usage/use-cases.md @@ -8,9 +8,9 @@ Featuring AOT compiler optimization, WasmEdge is one of the fastest WebAssembly - WasmEdge provides a lightweight, secure and high-performance runtime for microservices. It is fully compatible with application service frameworks such as Dapr, and service orchestrators like Kubernetes. WasmEdge microservices can run on edge servers, and have access to distributed cache, to support both stateless and stateful business logic functions for modern web apps. Also related: Serverless function-as-a-service in public clouds. -- [Serverless SaaS (Software-as-a-Service)](../../embed/use-case/serverless-saas.md) functions enables users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. +- [Serverless SaaS (Software-as-a-Service)](./serverless/serverless-platforms) functions enables users to extend and customize their SaaS experience without operating their own API callback servers. 
The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. -- [Smart device apps](../../embed/use-case/wasm-smart-devices.md) could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e, the GPU for AI inference). The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, and developer productivity. WasmEdge runs on Android, OpenHarmony, and seL4 RTOS devices. +- [Smart device apps](./wasm-smart-devices.md) could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e, the GPU for AI inference). The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, and developer productivity. WasmEdge runs on Android, OpenHarmony, and seL4 RTOS devices. - WasmEdge could support high performance DSLs (Domain Specific Languages) or act as a cloud-native JavaScript runtime by embedding a JS execution engine or interpreter. 
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/wasm-smart-devices.md b/docs/start/usage/wasm-smart-devices.md similarity index 100% rename from i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/wasm-smart-devices.md rename to docs/start/usage/wasm-smart-devices.md diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md b/docs/start/usage/web-app.md similarity index 100% rename from i18n/zh/docusaurus-plugin-content-docs/current/embed/use-case/web-app.md rename to docs/start/usage/web-app.md diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/_category_.json b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/_category_.json new file mode 100644 index 000000000..075ab1a18 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/_category_.json @@ -0,0 +1,8 @@ +{ + "label": "Serverless Platforms", + "position": 9, + "link": { + "type": "generated-index", + "description": "Run WebAssembly as an alternative lightweight runtime side-by-side with Docker and microVMs in cloud native infrastructure" + } +} diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/aws.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/aws.md new file mode 100644 index 000000000..c23c56105 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/aws.md @@ -0,0 +1,272 @@ +--- +sidebar_position: 1 +--- + +# WebAssembly Serverless Functions in AWS Lambda + +In this article, we will show you two serverless functions in Rust and WasmEdge deployed on AWS Lambda. One is the image processing function, the other one is the TensorFlow inference function. 
+ +> For the insight on why WasmEdge on AWS Lambda, please refer to the article [WebAssembly Serverless Functions in AWS Lambda](https://www.secondstate.io/articles/webassembly-serverless-functions-in-aws-lambda/) + +## Prerequisites + +Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode. + +```bash +rustup target add wasm32-wasi +``` + +The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on AWS Lambda. We will assume that you already have the basic knowledge of how to work with Next.js and Lambda. + +## Example 1: Image processing + +Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white. A [live demo](https://second-state.github.io/aws-lambda-wasm-runtime/) deployed through GitHub Pages is available. + +Fork the [demo application’s GitHub repo](https://github.com/second-state/aws-lambda-wasm-runtime) to get started. To deploy the application on AWS Lambda, follow the guide in the repository [README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md). + +### Create the function + +This repo is a standard Next.js application. The backend serverless function is in the `api/functions/image_grayscale` folder. The `src/main.rs` file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. 
+ +```rust +use hex; +use std::io::{self, Read}; +use image::{ImageOutputFormat, ImageFormat}; + +fn main() { + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); + let img = image::load_from_memory(&buf).unwrap(); + let filtered = img.grayscale(); + let mut buf = vec![]; + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + }, + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + }, + }; + io::stdout().write_all(&buf).unwrap(); + io::stdout().flush().unwrap(); +} +``` + +You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-grayscale/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/grayscale.wasm ../../ +``` + +> When we build the docker image, `api/pre.sh` is executed. `pre.sh` installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. + +### Create the service script to load the function + +The [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice that [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/pre.sh) for better performance. 
+ +```javascript +const { spawn } = require('child_process'); +const path = require('path'); + +function _runWasm(reqBody) { + return new Promise((resolve) => { + const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ + path.join(__dirname, 'grayscale.so'), + ]); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + let buf = Buffer.concat(d); + resolve(buf); + }); + + wasmedge.stdin.write(reqBody); + wasmedge.stdin.end(''); + }); +} +``` + +The `exports.handler` part of `hello.js` exports an async function handler, used to handle different events every time the serverless function is called. In this example, we simply process the image by calling the function above and return the result, but more complicated event-handling behavior may be defined based on your need. We also need to return some `Access-Control-Allow` headers to avoid [Cross-Origin Resource Sharing (CORS)](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) errors when calling the serverless function from a browser. You can read more about CORS errors [here](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS/Errors) if you encounter them when replicating our example. + +```javascript +exports.handler = async function (event, context) { + var typedArray = new Uint8Array( + event.body.match(/[\da-f]{2}/gi).map(function (h) { + return parseInt(h, 16); + }), + ); + let buf = await _runWasm(typedArray); + return { + statusCode: 200, + headers: { + 'Access-Control-Allow-Headers': + 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token', + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': + 'DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT', + }, + body: buf.toString('hex'), + }; +}; +``` + +### Build the Docker image for Lambda deployment + +Now we have the WebAssembly bytecode function and the script to load and connect to the web request. 
In order to deploy them as a function service on AWS Lambda, you still need to package the whole thing into a Docker image. + +We are not going to cover in detail about how to build the Docker image and deploy on AWS Lambda, as there are detailed steps in the [Deploy section of the repository README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md#deploy). However, we will highlight some lines in the [`Dockerfile`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/Dockerfile) for you to avoid some pitfalls. + +```dockerfile +FROM public.ecr.aws/lambda/nodejs:14 + +# Change directory to /var/task +WORKDIR /var/task + +RUN yum update -y && yum install -y curl tar gzip + +# Bundle and pre-compile the wasm files +COPY *.wasm ./ +COPY pre.sh ./ +RUN chmod +x pre.sh +RUN ./pre.sh + +# Bundle the JS files +COPY *.js ./ + +CMD [ "hello.handler" ] +``` + +First, we are building the image from [AWS Lambda's Node.js base image](https://hub.docker.com/r/amazon/aws-lambda-nodejs). The advantage of using AWS Lambda's base image is that it includes the [Lambda Runtime Interface Client (RIC)](https://github.com/aws/aws-lambda-nodejs-runtime-interface-client), which we need to implement in our Docker image as it is required by AWS Lambda. The Amazon Linux uses `yum` as the package manager. + +> These base images contain the Amazon Linux Base operating system, the runtime for a given language, dependencies and the Lambda Runtime Interface Client (RIC), which implements the Lambda [Runtime API](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-api.html). The Lambda Runtime Interface Client allows your runtime to receive requests from and send requests to the Lambda service. + +Second, we need to put our function and all its dependencies in the `/var/task` directory. Files in other folders will not be executed by AWS Lambda. + +Third, we need to define the default command when we start our container. 
`CMD [ "hello.handler" ]` means that we will call the `handler` function in `hello.js` whenever our serverless function is called. Recall that we have defined and exported the handler function in the previous steps through `exports.handler = ...` in `hello.js`. + +### Optional: test the Docker image locally + +Docker images built from AWS Lambda's base images can be tested locally following [this guide](https://docs.aws.amazon.com/lambda/latest/dg/images-test.html). Local testing requires [AWS Lambda Runtime Interface Emulator (RIE)](https://github.com/aws/aws-lambda-runtime-interface-emulator), which is already installed in all of AWS Lambda's base images. To test your image, first, start the Docker container by running: + +```bash +docker run -p 9000:8080 myfunction:latest +``` + +This command sets a function endpoint on your local machine at `http://localhost:9000/2015-03-31/functions/function/invocations`. + +Then, from a separate terminal window, run: + +```bash +curl -XPOST "http://localhost:9000/2015-03-31/functions/function/invocations" -d '{}' +``` + +And you should get your expected output in the terminal. + +If you don't want to use a base image from AWS Lambda, you can also use your own base image and install RIC and/or RIE while building your Docker image. Just follow **Create an image from an alternative base image** section from [this guide](https://docs.aws.amazon.com/lambda/latest/dg/images-create.html). + +That's it! After building your Docker image, you can deploy it to AWS Lambda following steps outlined in the repository [README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md#deploy). Now your serverless function is ready to rock! + +## Example 2: AI inference + +The [second demo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/tensorflow) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. 
+ +It is in [the same GitHub repo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. The backend serverless function for image classification is in the `api/functions/image-classification` folder in the `tensorflow` branch. The `src/main.rs` file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. + +```rust +pub fn main() { + // Step 1: Load the TFLite model + let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); + let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); + + // Step 2: Read image from STDIN + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + // Step 3: Resize the input image for the tensorflow model + let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); + + // Step 4: AI inference + let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); + session.add_input("input", &flat_img, &[1, 224, 224, 3]) + .run(); + let res_vec: Vec<u8> = session.get_output("MobilenetV1/Predictions/Reshape_1"); + + // Step 5: Find the food label that responds to the highest probability in res_vec + // ... ... + let mut label_lines = labels.lines(); + for _i in 0..max_index { + label_lines.next(); + } + + // Step 6: Generate the output text + let class_name = label_lines.next().unwrap().to_string(); + if max_value > 50 { + println!("It {} a {} in the picture", confidence.to_string(), class_name); + } else { + println!("It does not appear to be any food item in the picture."); + } +} +``` + +You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code.
+ +```bash +cd api/functions/image-classification/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/classify.wasm ../../ +``` + +Again, the `api/pre.sh` script installs WasmEdge runtime and its Tensorflow dependencies in this application. It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment. + +The [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. The handler function is similar to our previous example, and is omitted here. + +```javascript +const { spawn } = require('child_process'); +const path = require('path'); + +function _runWasm(reqBody) { + return new Promise(resolve => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge-tensorflow-lite'), + [path.join(__dirname, 'classify.so')], + {env: {'LD_LIBRARY_PATH': __dirname}} + ); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + resolve(d.join('')); + }); + + wasmedge.stdin.write(reqBody); + wasmedge.stdin.end(''); + }); +} + +exports.handler = ... // _runWasm(reqBody) is called in the handler +``` + +You can build your Docker image and deploy the function in the same way as outlined in the previous example. Now you have created a web app for subject classification! 
+ +Next, it's your turn to use the [aws-lambda-wasm-runtime repo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/main) as a template to develop Rust serverless function on AWS Lambda. Looking forward to your great work. diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/netlify.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/netlify.md new file mode 100644 index 000000000..0f4b82db2 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/netlify.md @@ -0,0 +1,189 @@ +--- +sidebar_position: 2 +--- + +# WebAssembly Serverless Functions in Netlify + +In this article we will show you two serverless functions in Rust and WasmEdge deployed on Netlify. One is the image processing function, the other one is the TensorFlow inference function. + +> For more insights on why WasmEdge on Netlify, please refer to the article [WebAssembly Serverless Functions in Netlify](https://www.secondstate.io/articles/netlify-wasmedge-webassembly-rust-serverless/). + +## Prerequisite + +Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode. + +```bash +rustup target add wasm32-wasi +``` + +The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on Netlify. We will assume that you already have the basic knowledge of how to work with Next.js and Netlify. + +## Example 1: Image processing + +Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white. A [live demo](https://60fe22f9ff623f0007656040--reverent-hodgkin-dc1f51.netlify.app/) deployed on Netlify is available. + +Fork the [demo application’s GitHub repo](https://github.com/second-state/netlify-wasm-runtime) to get started. 
To deploy the application on Netlify, just [add your github repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/). + +This repo is a standard Next.js application for the Netlify platform. The backend serverless function is in the [`api/functions/image_grayscale`](https://github.com/second-state/netlify-wasm-runtime/tree/main/api/functions/image-grayscale) folder. The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/functions/image-grayscale/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. + +```rust +use hex; +use std::io::{self, Read}; +use image::{ImageOutputFormat, ImageFormat}; + +fn main() { + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); + let img = image::load_from_memory(&buf).unwrap(); + let filtered = img.grayscale(); + let mut buf = vec![]; + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + }, + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + }, + }; + io::stdout().write_all(&buf).unwrap(); + io::stdout().flush().unwrap(); +} +``` + +You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-grayscale/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/grayscale.wasm ../../ +``` + +> The Netlify function runs [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/pre.sh) upon setting up the serverless environment. It installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. 
+ +The [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/pre.sh) for better performance. + +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ + path.join(__dirname, 'grayscale.so'), + ]); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + let buf = Buffer.concat(d); + + res.setHeader('Content-Type', req.headers['image-type']); + res.send(buf); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +}; +``` + +That's it. [Deploy the repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and you now have a Netlify Jamstack app with a high-performance Rust and WebAssembly based serverless backend. + +## Example 2: AI inference + +The [second demo](https://60ff7e2d10fe590008db70a9--reverent-hodgkin-dc1f51.netlify.app/) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. + +It is in [the same GitHub repo](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. 
The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. + +```rust +pub fn main() { + // Step 1: Load the TFLite model + let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); + let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); + + // Step 2: Read image from STDIN + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + // Step 3: Resize the input image for the tensorflow model + let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); + + // Step 4: AI inference + let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); + session.add_input("input", &flat_img, &[1, 224, 224, 3]) + .run(); + let res_vec: Vec = session.get_output("MobilenetV1/Predictions/Reshape_1"); + + // Step 5: Find the food label that responds to the highest probability in res_vec + // ... ... + let mut label_lines = labels.lines(); + for _i in 0..max_index { + label_lines.next(); + } + + // Step 6: Generate the output text + let class_name = label_lines.next().unwrap().to_string(); + if max_value > 50 { + println!("It {} a {} in the picture", confidence.to_string(), class_name, class_name); + } else { + println!("It does not appears to be any food item in the picture."); + } +} +``` + +You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-classification/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. 
+ +```bash +cp target/wasm32-wasi/release/classify.wasm ../../ +``` + +Again, the [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/pre.sh) script installs WasmEdge runtime and its Tensorflow dependencies in this application. It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment. + +The [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. + +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge-tensorflow-lite'), + [path.join(__dirname, 'classify.so')], + { env: { LD_LIBRARY_PATH: __dirname } }, + ); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + res.setHeader('Content-Type', `text/plain`); + res.send(d.join('')); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +}; +``` + +You can now [deploy your forked repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and have a web app for subject classification. + +Next, it's your turn to develop Rust serverless functions in Netlify using the [netlify-wasm-runtime repo](https://github.com/second-state/netlify-wasm-runtime) as a template. Looking forward to your great work. 
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/tencent.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/tencent.md new file mode 100644 index 000000000..9937f7149 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/tencent.md @@ -0,0 +1,11 @@ +--- +sidebar_position: 4 +--- + +# WebAssembly serverless functions on Tencent Cloud + +As the main users of Tencent Cloud are from China, the tutorial is [written in Chinese](https://my.oschina.net/u/4532842/blog/5172639). + +We also provide a code template for deploying serverless WebAssembly functions on Tencent Cloud; please check out [the tencent-scf-wasm-runtime repo](https://github.com/second-state/tencent-scf-wasm-runtime). + +Fork the repo and start writing your own Rust functions. diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/vercel.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/vercel.md new file mode 100644 index 000000000..3ef87bd5c --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/serverless/vercel.md @@ -0,0 +1,191 @@ +--- +sidebar_position: 5 +--- + +# Rust and WebAssembly Serverless functions in Vercel + +In this article, we will show you two serverless functions in Rust and WasmEdge deployed on Vercel. One is the image processing function, the other is the TensorFlow inference function. + +> For more insights on why WasmEdge on Vercel, please refer to the article [Rust and WebAssembly Serverless Functions in Vercel](https://www.secondstate.io/articles/vercel-wasmedge-webassembly-rust/). + +## Prerequisites + +Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode.
+ +```bash +rustup target add wasm32-wasi +``` + +The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on Vercel. We will assume that you already have the basic knowledge of how to work with Vercel. + +## Example 1: Image processing + +Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white. A [live demo](https://vercel-wasm-runtime.vercel.app/) deployed on Vercel is available. + +Fork the [demo application’s GitHub repo](https://github.com/second-state/vercel-wasm-runtime) to get started. To deploy the application on Vercel, just [import the Github repo](https://vercel.com/docs/git#deploying-a-git-repository) from [Vercel for Github](https://vercel.com/docs/git/vercel-for-github) web page. + +This repo is a standard Next.js application for the Vercel platform. The backend serverless function is in the [`api/functions/image_grayscale`](https://github.com/second-state/vercel-wasm-runtime/tree/main/api/functions/image-grayscale) folder. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/functions/image-grayscale/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. 
+ +```rust +use hex; +use std::io::{self, Read}; +use image::{ImageOutputFormat, ImageFormat}; + +fn main() { + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); + let img = image::load_from_memory(&buf).unwrap(); + let filtered = img.grayscale(); + let mut buf = vec![]; + match image_format_detected { + ImageFormat::Gif => { + filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); + }, + _ => { + filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); + }, + }; + io::stdout().write_all(&buf).unwrap(); + io::stdout().flush().unwrap(); +} +``` + +You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-grayscale/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/grayscale.wasm ../../ +``` + +> Vercel runs [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/pre.sh) upon setting up the serverless environment. It installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. + +The [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/hello.js) file conforms Vercel serverless specification. It loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/pre.sh) for better performance. 
+ +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ + path.join(__dirname, 'grayscale.so'), + ]); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + let buf = Buffer.concat(d); + + res.setHeader('Content-Type', req.headers['image-type']); + res.send(buf); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +}; +``` + +That's it. [Deploy the repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and you now have a Vercel Jamstack app with a high-performance Rust and WebAssembly based serverless backend. + +## Example 2: AI inference + +The [second demo](https://vercel-wasm-runtime.vercel.app/) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. + +It is in [the same GitHub repo](https://github.com/second-state/vercel-wasm-runtime) as the previous example but in the `tensorflow` branch. Note: when you [import this GitHub repo](https://vercel.com/docs/git#deploying-a-git-repository) on the Vercel website, it will create a [preview URL](https://vercel.com/docs/platform/deployments#preview) for each branch. The `tensorflow` branch would have its own deployment URL. + +The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/vercel-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. 
It utilizes the WasmEdge Tensorflow API to run the AI inference. + +```rust +pub fn main() { + // Step 1: Load the TFLite model + let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); + let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); + + // Step 2: Read image from STDIN + let mut buf = Vec::new(); + io::stdin().read_to_end(&mut buf).unwrap(); + + // Step 3: Resize the input image for the tensorflow model + let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); + + // Step 4: AI inference + let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); + session.add_input("input", &flat_img, &[1, 224, 224, 3]) + .run(); + let res_vec: Vec = session.get_output("MobilenetV1/Predictions/Reshape_1"); + + // Step 5: Find the food label that responds to the highest probability in res_vec + // ... ... + let mut label_lines = labels.lines(); + for _i in 0..max_index { + label_lines.next(); + } + + // Step 6: Generate the output text + let class_name = label_lines.next().unwrap().to_string(); + if max_value > 50 { + println!("It {} a {} in the picture", confidence.to_string(), class_name, class_name); + } else { + println!("It does not appears to be any food item in the picture."); + } +} +``` + +You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code. + +```bash +cd api/functions/image-classification/ +cargo build --release --target wasm32-wasi +``` + +Copy the build artifacts to the `api` folder. + +```bash +cp target/wasm32-wasi/release/classify.wasm ../../ +``` + +Again, the [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/pre.sh) script installs WasmEdge runtime and its Tensorflow dependencies in this application. 
It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment. + +The [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/hello.js) file conforms Vercel serverless specification. It loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. + +```javascript +const fs = require('fs'); +const { spawn } = require('child_process'); +const path = require('path'); + +module.exports = (req, res) => { + const wasmedge = spawn( + path.join(__dirname, 'wasmedge-tensorflow-lite'), + [path.join(__dirname, 'classify.so')], + { env: { LD_LIBRARY_PATH: __dirname } }, + ); + + let d = []; + wasmedge.stdout.on('data', (data) => { + d.push(data); + }); + + wasmedge.on('close', (code) => { + res.setHeader('Content-Type', `text/plain`); + res.send(d.join('')); + }); + + wasmedge.stdin.write(req.body); + wasmedge.stdin.end(''); +}; +``` + +You can now [deploy your forked repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and have a web app for subject classification. + +Next, it's your turn to use [the vercel-wasm-runtime repo](https://github.com/second-state/vercel-wasm-runtime) as a template to develop your own Rust serverless functions in Vercel. Looking forward to your great work. 
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md index d9a6f3556..3cd2b5683 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md @@ -8,9 +8,9 @@ Featuring AOT compiler optimization, WasmEdge is one of the fastest WebAssembly - WasmEdge provides a lightweight, secure and high-performance runtime for microservices. It is fully compatible with application service frameworks such as Dapr, and service orchestrators like Kubernetes. WasmEdge microservices can run on edge servers, and have access to distributed cache, to support both stateless and stateful business logic functions for modern web apps. Also related: Serverless function-as-a-service in public clouds. -- [Serverless SaaS (Software-as-a-Service)](../../embed/use-case/serverless-saas.md) functions enables users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. +- [Serverless SaaS (Software-as-a-Service)](./serverless/serverless-platforms) functions enable users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. -- [Smart device apps](../../embed/use-case/wasm-smart-devices.md) could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e, the GPU for AI inference).
The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, and developer productivity. WasmEdge runs on Android, OpenHarmony, and seL4 RTOS devices. +- [Smart device apps](./wasm-smart-devices.md) could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e., the GPU for AI inference). The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, and developer productivity. WasmEdge runs on Android, OpenHarmony, and seL4 RTOS devices. - WasmEdge could support high performance DSLs (Domain Specific Languages) or act as a cloud-native JavaScript runtime by embedding a JS execution engine or interpreter. diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/wasm-smart-devices.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/wasm-smart-devices.md new file mode 100644 index 000000000..17cd9ad77 --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/wasm-smart-devices.md @@ -0,0 +1,14 @@ +--- +sidebar_position: 4 +--- + +# WasmEdge On Smart Devices + +Smart device apps could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e., the GPU for AI inference). The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, OTA upgradability, and developer productivity. WasmEdge runs on the following device OSes.
+ +- [Android](/category/build-and-run-wasmedge-on-android) +- [OpenHarmony](../../contribute/source/os/openharmony.md) +- [Raspberry Pi](../../contribute/source/os/raspberrypi.md) +- [The seL4 RTOS](../../contribute/source/os/sel4.md) + +With WasmEdge on both the device and the edge server, we can support [isomorphic Server-Side Rendering (SSR)](../../develop/rust/ssr.md) and [microservices](../../start/build-and-run/docker_wasm.md#deploy-the-microservice-example) for rich-client mobile applications that are both portable and upgradeable. diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/web-app.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/web-app.md new file mode 100644 index 000000000..ba737d89d --- /dev/null +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/web-app.md @@ -0,0 +1,101 @@ +--- +sidebar_position: 9 +--- + +# A simple WebAssembly example + +In this article, I will show you how to build a container image for a WebAssembly application. It can then be started and managed by Kubernetes ecosystem tools, such as CRI-O, Docker, crun, and Kubernetes. + +## Prerequisites + +> If you simply want a wasm bytecode file to test as a container image, you can skip the building process and just [download the wasm file here](https://github.com/second-state/wasm-learning/blob/master/cli/wasi/wasi_example_main.wasm). + +If you have not done so already, follow these simple instructions to [install Rust](https://www.rust-lang.org/tools/install). + +## Download example code + +```bash +git clone https://github.com/second-state/wasm-learning +cd wasm-learning/cli/wasi +``` + +## Build the WASM bytecode + +```bash +rustup target add wasm32-wasi +cargo build --target wasm32-wasi --release +``` + +The wasm bytecode application is in the `target/wasm32-wasi/release/wasi_example_main.wasm` file. You can now publish and use it as a container image. 
+ +## Apply executable permission on the Wasm bytecode + +```bash +chmod +x target/wasm32-wasi/release/wasi_example_main.wasm +``` + +## Create Dockerfile + +Create a file called `Dockerfile` in the `target/wasm32-wasi/release/` folder with the following content: + +```dockerfile +FROM scratch +ADD wasi_example_main.wasm / +CMD ["/wasi_example_main.wasm"] +``` + +## Create container image with annotations + +> Please note that adding self-defined annotation is still a new feature in buildah. + +The `crun` container runtime can start the above WebAssembly-based container image. But it requires the `module.wasm.image/variant=compat-smart` annotation on the container image to indicate that it is a WebAssembly application without a guest OS. You can find the details in [Official crun repo](https://github.com/containers/crun/blob/main/docs/wasm-wasi-example.md). + +To add `module.wasm.image/variant=compat-smart` annotation in the container image, you will need the latest [buildah](https://buildah.io/). Currently, Docker does not support this feature. Please follow [the install instructions of buildah](https://github.com/containers/buildah/blob/main/install.md) to build the latest buildah binary. + +### Build and install the latest buildah on Ubuntu + +On Ubuntu zesty and xenial, use these commands to prepare for buildah. 
+ +```bash +sudo apt-get -y install software-properties-common + +export OS="xUbuntu_20.04" +sudo bash -c "echo \"deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/ /\" > /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list" +sudo bash -c "curl -L https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/Release.key | apt-key add -" + +sudo add-apt-repository -y ppa:alexlarsson/flatpak +sudo apt-get -y -qq update +sudo apt-get -y install bats git libapparmor-dev libdevmapper-dev libglib2.0-dev libgpgme-dev libseccomp-dev libselinux1-dev skopeo-containers go-md2man containers-common +sudo apt-get -y install golang-1.16 make +``` + +Then, follow these steps to build and install buildah on Ubuntu. + +```bash +mkdir -p ~/buildah +cd ~/buildah +export GOPATH=`pwd` +git clone https://github.com/containers/buildah ./src/github.com/containers/buildah +cd ./src/github.com/containers/buildah +PATH=/usr/lib/go-1.16/bin:$PATH make +cp bin/buildah /usr/bin/buildah +buildah --help +``` + +### Create and publish a container image with buildah + +In the `target/wasm32-wasi/release/` folder, do the following. + +```bash +$ sudo buildah build --annotation "module.wasm.image/variant=compat-smart" -t wasm-wasi-example . +# make sure docker is install and running +# systemctl status docker +# to make sure regular user can use docker +# sudo usermod -aG docker $USER +# newgrp docker + +# You may need to use docker login to create the `~/.docker/config.json` for auth. +$ sudo buildah push --authfile ~/.docker/config.json wasm-wasi-example docker://docker.io/wasmedge/example-wasi:latest +``` + +That's it! Now you can try to run it in [CRI-O](../../develop/deploy/cri-runtime/crio-crun.md) or [Kubernetes](../../develop/deploy/kubernetes/kubernetes-cri-o.md)! 
From 73402bd9616bf1ab64e9538b211da24b1bfff7ea Mon Sep 17 00:00:00 2001 From: Adithya Krishna Date: Thu, 31 Aug 2023 07:52:27 +0530 Subject: [PATCH 08/11] Removed embedded docs Signed-off-by: Adithya Krishna --- .../embed/use-case/serverless/_category_.json | 8 - docs/embed/use-case/serverless/aws.md | 272 ------------------ docs/embed/use-case/serverless/netlify.md | 189 ------------ docs/embed/use-case/serverless/tencent.md | 11 - docs/embed/use-case/serverless/vercel.md | 191 ------------ docs/embed/use-case/web-app.md | 101 ------- .../current/start/usage/web-app.md | 101 ------- 7 files changed, 873 deletions(-) delete mode 100644 docs/embed/use-case/serverless/_category_.json delete mode 100644 docs/embed/use-case/serverless/aws.md delete mode 100644 docs/embed/use-case/serverless/netlify.md delete mode 100644 docs/embed/use-case/serverless/tencent.md delete mode 100644 docs/embed/use-case/serverless/vercel.md delete mode 100644 docs/embed/use-case/web-app.md delete mode 100644 i18n/zh/docusaurus-plugin-content-docs/current/start/usage/web-app.md diff --git a/docs/embed/use-case/serverless/_category_.json b/docs/embed/use-case/serverless/_category_.json deleted file mode 100644 index 53e7dfdd2..000000000 --- a/docs/embed/use-case/serverless/_category_.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "label": "Serviceless Platforms", - "position": 9, - "link": { - "type": "generated-index", - "description": "Run WebAssembly as an alternative lightweight runtime side-by-side with Docker and microVMs in cloud native infrastructure" - } -} diff --git a/docs/embed/use-case/serverless/aws.md b/docs/embed/use-case/serverless/aws.md deleted file mode 100644 index c23c56105..000000000 --- a/docs/embed/use-case/serverless/aws.md +++ /dev/null @@ -1,272 +0,0 @@ ---- -sidebar_position: 1 ---- - -# WebAssembly Serverless Functions in AWS Lambda - -In this article, we will show you two serverless functions in Rust and WasmEdge deployed on AWS Lambda. 
One is the image processing function, the other one is the TensorFlow inference function. - -> For the insight on why WasmEdge on AWS Lambda, please refer to the article [WebAssembly Serverless Functions in AWS Lambda](https://www.secondstate.io/articles/webassembly-serverless-functions-in-aws-lambda/) - -## Prerequisites - -Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode. - -```bash -rustup target add wasm32-wasi -``` - -The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on AWS Lambda. We will assume that you already have the basic knowledge of how to work with Next.js and Lambda. - -## Example 1: Image processing - -Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white. A [live demo](https://second-state.github.io/aws-lambda-wasm-runtime/) deployed through GitHub Pages is available. - -Fork the [demo application’s GitHub repo](https://github.com/second-state/aws-lambda-wasm-runtime) to get started. To deploy the application on AWS Lambda, follow the guide in the repository [README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md). - -### Create the function - -This repo is a standard Next.js application. The backend serverless function is in the `api/functions/image_grayscale` folder. The `src/main.rs` file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. 
- -```rust -use hex; -use std::io::{self, Read}; -use image::{ImageOutputFormat, ImageFormat}; - -fn main() { - let mut buf = Vec::new(); - io::stdin().read_to_end(&mut buf).unwrap(); - - let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); - let img = image::load_from_memory(&buf).unwrap(); - let filtered = img.grayscale(); - let mut buf = vec![]; - match image_format_detected { - ImageFormat::Gif => { - filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); - }, - _ => { - filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); - }, - }; - io::stdout().write_all(&buf).unwrap(); - io::stdout().flush().unwrap(); -} -``` - -You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. - -```bash -cd api/functions/image-grayscale/ -cargo build --release --target wasm32-wasi -``` - -Copy the build artifacts to the `api` folder. - -```bash -cp target/wasm32-wasi/release/grayscale.wasm ../../ -``` - -> When we build the docker image, `api/pre.sh` is executed. `pre.sh` installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. - -### Create the service script to load the function - -The [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice that [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/main/api/pre.sh) for better performance. 
- -```javascript -const { spawn } = require('child_process'); -const path = require('path'); - -function _runWasm(reqBody) { - return new Promise((resolve) => { - const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ - path.join(__dirname, 'grayscale.so'), - ]); - - let d = []; - wasmedge.stdout.on('data', (data) => { - d.push(data); - }); - - wasmedge.on('close', (code) => { - let buf = Buffer.concat(d); - resolve(buf); - }); - - wasmedge.stdin.write(reqBody); - wasmedge.stdin.end(''); - }); -} -``` - -The `exports.handler` part of `hello.js` exports an async function handler, used to handle different events every time the serverless function is called. In this example, we simply process the image by calling the function above and return the result, but more complicated event-handling behavior may be defined based on your need. We also need to return some `Access-Control-Allow` headers to avoid [Cross-Origin Resource Sharing (CORS)](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) errors when calling the serverless function from a browser. You can read more about CORS errors [here](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS/Errors) if you encounter them when replicating our example. - -```javascript -exports.handler = async function (event, context) { - var typedArray = new Uint8Array( - event.body.match(/[\da-f]{2}/gi).map(function (h) { - return parseInt(h, 16); - }), - ); - let buf = await _runWasm(typedArray); - return { - statusCode: 200, - headers: { - 'Access-Control-Allow-Headers': - 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token', - 'Access-Control-Allow-Origin': '*', - 'Access-Control-Allow-Methods': - 'DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT', - }, - body: buf.toString('hex'), - }; -}; -``` - -### Build the Docker image for Lambda deployment - -Now we have the WebAssembly bytecode function and the script to load and connect to the web request. 
In order to deploy them as a function service on AWS Lambda, you still need to package the whole thing into a Docker image. - -We are not going to cover in detail about how to build the Docker image and deploy on AWS Lambda, as there are detailed steps in the [Deploy section of the repository README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md#deploy). However, we will highlight some lines in the [`Dockerfile`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/Dockerfile) for you to avoid some pitfalls. - -```dockerfile -FROM public.ecr.aws/lambda/nodejs:14 - -# Change directory to /var/task -WORKDIR /var/task - -RUN yum update -y && yum install -y curl tar gzip - -# Bundle and pre-compile the wasm files -COPY *.wasm ./ -COPY pre.sh ./ -RUN chmod +x pre.sh -RUN ./pre.sh - -# Bundle the JS files -COPY *.js ./ - -CMD [ "hello.handler" ] -``` - -First, we are building the image from [AWS Lambda's Node.js base image](https://hub.docker.com/r/amazon/aws-lambda-nodejs). The advantage of using AWS Lambda's base image is that it includes the [Lambda Runtime Interface Client (RIC)](https://github.com/aws/aws-lambda-nodejs-runtime-interface-client), which we need to implement in our Docker image as it is required by AWS Lambda. The Amazon Linux uses `yum` as the package manager. - -> These base images contain the Amazon Linux Base operating system, the runtime for a given language, dependencies and the Lambda Runtime Interface Client (RIC), which implements the Lambda [Runtime API](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-api.html). The Lambda Runtime Interface Client allows your runtime to receive requests from and send requests to the Lambda service. - -Second, we need to put our function and all its dependencies in the `/var/task` directory. Files in other folders will not be executed by AWS Lambda. - -Third, we need to define the default command when we start our container. 
`CMD [ "hello.handler" ]` means that we will call the `handler` function in `hello.js` whenever our serverless function is called. Recall that we have defined and exported the handler function in the previous steps through `exports.handler = ...` in `hello.js`. - -### Optional: test the Docker image locally - -Docker images built from AWS Lambda's base images can be tested locally following [this guide](https://docs.aws.amazon.com/lambda/latest/dg/images-test.html). Local testing requires [AWS Lambda Runtime Interface Emulator (RIE)](https://github.com/aws/aws-lambda-runtime-interface-emulator), which is already installed in all of AWS Lambda's base images. To test your image, first, start the Docker container by running: - -```bash -docker run -p 9000:8080 myfunction:latest -``` - -This command sets a function endpoint on your local machine at `http://localhost:9000/2015-03-31/functions/function/invocations`. - -Then, from a separate terminal window, run: - -```bash -curl -XPOST "http://localhost:9000/2015-03-31/functions/function/invocations" -d '{}' -``` - -And you should get your expected output in the terminal. - -If you don't want to use a base image from AWS Lambda, you can also use your own base image and install RIC and/or RIE while building your Docker image. Just follow **Create an image from an alternative base image** section from [this guide](https://docs.aws.amazon.com/lambda/latest/dg/images-create.html). - -That's it! After building your Docker image, you can deploy it to AWS Lambda following steps outlined in the repository [README](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/README.md#deploy). Now your serverless function is ready to rock! - -## Example 2: AI inference - -The [second demo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/tensorflow) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. 
- -It is in [the same GitHub repo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. The backend serverless function for image classification is in the `api/functions/image-classification` folder in the `tensorflow` branch. The `src/main.rs` file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. - -```rust -pub fn main() { - // Step 1: Load the TFLite model - let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); - let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); - - // Step 2: Read image from STDIN - let mut buf = Vec::new(); - io::stdin().read_to_end(&mut buf).unwrap(); - - // Step 3: Resize the input image for the tensorflow model - let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); - - // Step 4: AI inference - let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); - session.add_input("input", &flat_img, &[1, 224, 224, 3]) - .run(); - let res_vec: Vec = session.get_output("MobilenetV1/Predictions/Reshape_1"); - - // Step 5: Find the food label that responds to the highest probability in res_vec - // ... ... - let mut label_lines = labels.lines(); - for _i in 0..max_index { - label_lines.next(); - } - - // Step 6: Generate the output text - let class_name = label_lines.next().unwrap().to_string(); - if max_value > 50 { - println!("It {} a {} in the picture", confidence.to_string(), class_name, class_name); - } else { - println!("It does not appears to be any food item in the picture."); - } -} -``` - -You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code. 
- -```bash -cd api/functions/image-classification/ -cargo build --release --target wasm32-wasi -``` - -Copy the build artifacts to the `api` folder. - -```bash -cp target/wasm32-wasi/release/classify.wasm ../../ -``` - -Again, the `api/pre.sh` script installs WasmEdge runtime and its Tensorflow dependencies in this application. It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment. - -The [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/aws-lambda-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. The handler function is similar to our previous example, and is omitted here. - -```javascript -const { spawn } = require('child_process'); -const path = require('path'); - -function _runWasm(reqBody) { - return new Promise(resolve => { - const wasmedge = spawn( - path.join(__dirname, 'wasmedge-tensorflow-lite'), - [path.join(__dirname, 'classify.so')], - {env: {'LD_LIBRARY_PATH': __dirname}} - ); - - let d = []; - wasmedge.stdout.on('data', (data) => { - d.push(data); - }); - - wasmedge.on('close', (code) => { - resolve(d.join('')); - }); - - wasmedge.stdin.write(reqBody); - wasmedge.stdin.end(''); - }); -} - -exports.handler = ... // _runWasm(reqBody) is called in the handler -``` - -You can build your Docker image and deploy the function in the same way as outlined in the previous example. Now you have created a web app for subject classification! 
- -Next, it's your turn to use the [aws-lambda-wasm-runtime repo](https://github.com/second-state/aws-lambda-wasm-runtime/tree/main) as a template to develop Rust serverless function on AWS Lambda. Looking forward to your great work. diff --git a/docs/embed/use-case/serverless/netlify.md b/docs/embed/use-case/serverless/netlify.md deleted file mode 100644 index 0f4b82db2..000000000 --- a/docs/embed/use-case/serverless/netlify.md +++ /dev/null @@ -1,189 +0,0 @@ ---- -sidebar_position: 2 ---- - -# WebAssembly Serverless Functions in Netlify - -In this article we will show you two serverless functions in Rust and WasmEdge deployed on Netlify. One is the image processing function, the other one is the TensorFlow inference function. - -> For more insights on why WasmEdge on Netlify, please refer to the article [WebAssembly Serverless Functions in Netlify](https://www.secondstate.io/articles/netlify-wasmedge-webassembly-rust-serverless/). - -## Prerequisite - -Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode. - -```bash -rustup target add wasm32-wasi -``` - -The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on Netlify. We will assume that you already have the basic knowledge of how to work with Next.js and Netlify. - -## Example 1: Image processing - -Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white. A [live demo](https://60fe22f9ff623f0007656040--reverent-hodgkin-dc1f51.netlify.app/) deployed on Netlify is available. - -Fork the [demo application’s GitHub repo](https://github.com/second-state/netlify-wasm-runtime) to get started. 
To deploy the application on Netlify, just [add your github repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/). - -This repo is a standard Next.js application for the Netlify platform. The backend serverless function is in the [`api/functions/image_grayscale`](https://github.com/second-state/netlify-wasm-runtime/tree/main/api/functions/image-grayscale) folder. The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/functions/image-grayscale/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. - -```rust -use hex; -use std::io::{self, Read}; -use image::{ImageOutputFormat, ImageFormat}; - -fn main() { - let mut buf = Vec::new(); - io::stdin().read_to_end(&mut buf).unwrap(); - - let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); - let img = image::load_from_memory(&buf).unwrap(); - let filtered = img.grayscale(); - let mut buf = vec![]; - match image_format_detected { - ImageFormat::Gif => { - filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); - }, - _ => { - filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); - }, - }; - io::stdout().write_all(&buf).unwrap(); - io::stdout().flush().unwrap(); -} -``` - -You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. - -```bash -cd api/functions/image-grayscale/ -cargo build --release --target wasm32-wasi -``` - -Copy the build artifacts to the `api` folder. - -```bash -cp target/wasm32-wasi/release/grayscale.wasm ../../ -``` - -> The Netlify function runs [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/pre.sh) upon setting up the serverless environment. It installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. 
- -The [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/main/api/pre.sh) for better performance. - -```javascript -const fs = require('fs'); -const { spawn } = require('child_process'); -const path = require('path'); - -module.exports = (req, res) => { - const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ - path.join(__dirname, 'grayscale.so'), - ]); - - let d = []; - wasmedge.stdout.on('data', (data) => { - d.push(data); - }); - - wasmedge.on('close', (code) => { - let buf = Buffer.concat(d); - - res.setHeader('Content-Type', req.headers['image-type']); - res.send(buf); - }); - - wasmedge.stdin.write(req.body); - wasmedge.stdin.end(''); -}; -``` - -That's it. [Deploy the repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and you now have a Netlify Jamstack app with a high-performance Rust and WebAssembly based serverless backend. - -## Example 2: AI inference - -The [second demo](https://60ff7e2d10fe590008db70a9--reverent-hodgkin-dc1f51.netlify.app/) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. - -It is in [the same GitHub repo](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow) as the previous example but in the `tensorflow` branch. The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/netlify-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. 
The [`src/main.rs`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. - -```rust -pub fn main() { - // Step 1: Load the TFLite model - let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); - let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); - - // Step 2: Read image from STDIN - let mut buf = Vec::new(); - io::stdin().read_to_end(&mut buf).unwrap(); - - // Step 3: Resize the input image for the tensorflow model - let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); - - // Step 4: AI inference - let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); - session.add_input("input", &flat_img, &[1, 224, 224, 3]) - .run(); - let res_vec: Vec = session.get_output("MobilenetV1/Predictions/Reshape_1"); - - // Step 5: Find the food label that responds to the highest probability in res_vec - // ... ... - let mut label_lines = labels.lines(); - for _i in 0..max_index { - label_lines.next(); - } - - // Step 6: Generate the output text - let class_name = label_lines.next().unwrap().to_string(); - if max_value > 50 { - println!("It {} a {} in the picture", confidence.to_string(), class_name, class_name); - } else { - println!("It does not appears to be any food item in the picture."); - } -} -``` - -You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code. - -```bash -cd api/functions/image-classification/ -cargo build --release --target wasm32-wasi -``` - -Copy the build artifacts to the `api` folder. 
- -```bash -cp target/wasm32-wasi/release/classify.wasm ../../ -``` - -Again, the [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/pre.sh) script installs WasmEdge runtime and its Tensorflow dependencies in this application. It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment. - -The [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/hello.js) script loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/netlify-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. - -```javascript -const fs = require('fs'); -const { spawn } = require('child_process'); -const path = require('path'); - -module.exports = (req, res) => { - const wasmedge = spawn( - path.join(__dirname, 'wasmedge-tensorflow-lite'), - [path.join(__dirname, 'classify.so')], - { env: { LD_LIBRARY_PATH: __dirname } }, - ); - - let d = []; - wasmedge.stdout.on('data', (data) => { - d.push(data); - }); - - wasmedge.on('close', (code) => { - res.setHeader('Content-Type', `text/plain`); - res.send(d.join('')); - }); - - wasmedge.stdin.write(req.body); - wasmedge.stdin.end(''); -}; -``` - -You can now [deploy your forked repo to Netlify](https://www.netlify.com/blog/2016/09/29/a-step-by-step-guide-deploying-on-netlify/) and have a web app for subject classification. - -Next, it's your turn to develop Rust serverless functions in Netlify using the [netlify-wasm-runtime repo](https://github.com/second-state/netlify-wasm-runtime) as a template. Looking forward to your great work. 
diff --git a/docs/embed/use-case/serverless/tencent.md b/docs/embed/use-case/serverless/tencent.md deleted file mode 100644 index 9937f7149..000000000 --- a/docs/embed/use-case/serverless/tencent.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -sidebar_position: 4 ---- - -# WebAssembly serverless functions on Tencent Cloud - -As the main users of Tencent Cloud are from China, so the tutorial is [written in Chinese](https://my.oschina.net/u/4532842/blog/5172639). - -We also provide a code template for deploying serverless WebAssembly functions on Tencent Cloud, please check out [the tencent-scf-wasm-runtime repo](https://github.com/second-state/tencent-scf-wasm-runtime). - -Fork the repo and start writing your own rust functions. diff --git a/docs/embed/use-case/serverless/vercel.md b/docs/embed/use-case/serverless/vercel.md deleted file mode 100644 index 3ef87bd5c..000000000 --- a/docs/embed/use-case/serverless/vercel.md +++ /dev/null @@ -1,191 +0,0 @@ ---- -sidebar_position: 5 ---- - -# Rust and WebAssembly Serverless functions in Vercel - -In this article, we will show you two serverless functions in Rust and WasmEdge deployed on Vercel. One is the image processing function, the other one is the TensorFlow inference function. - -> For more insights on why WasmEdge on Vercel, please refer to the article [Rust and WebAssembly Serverless Functions in Vercel](https://www.secondstate.io/articles/vercel-wasmedge-webassembly-rust/). - -## Prerequisite - -Since our demo WebAssembly functions are written in Rust, you will need a [Rust compiler](https://www.rust-lang.org/tools/install). Make sure that you install the `wasm32-wasi` compiler target as follows, in order to generate WebAssembly bytecode. - -```bash -rustup target add wasm32-wasi -``` - -The demo application front end is written in [Next.js](https://nextjs.org/), and deployed on Vercel. We will assume that you already have the basic knowledge of how to work with Vercel. 
- -## Example 1: Image processing - -Our first demo application allows users to upload an image and then invoke a serverless function to turn it into black and white. A [live demo](https://vercel-wasm-runtime.vercel.app/) deployed on Vercel is available. - -Fork the [demo application’s GitHub repo](https://github.com/second-state/vercel-wasm-runtime) to get started. To deploy the application on Vercel, just [import the Github repo](https://vercel.com/docs/git#deploying-a-git-repository) from [Vercel for Github](https://vercel.com/docs/git/vercel-for-github) web page. - -This repo is a standard Next.js application for the Vercel platform. The backend serverless function is in the [`api/functions/image_grayscale`](https://github.com/second-state/vercel-wasm-runtime/tree/main/api/functions/image-grayscale) folder. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/functions/image-grayscale/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the black-white image to the `STDOUT`. - -```rust -use hex; -use std::io::{self, Read}; -use image::{ImageOutputFormat, ImageFormat}; - -fn main() { - let mut buf = Vec::new(); - io::stdin().read_to_end(&mut buf).unwrap(); - - let image_format_detected: ImageFormat = image::guess_format(&buf).unwrap(); - let img = image::load_from_memory(&buf).unwrap(); - let filtered = img.grayscale(); - let mut buf = vec![]; - match image_format_detected { - ImageFormat::Gif => { - filtered.write_to(&mut buf, ImageOutputFormat::Gif).unwrap(); - }, - _ => { - filtered.write_to(&mut buf, ImageOutputFormat::Png).unwrap(); - }, - }; - io::stdout().write_all(&buf).unwrap(); - io::stdout().flush().unwrap(); -} -``` - -You can use Rust’s `cargo` tool to build the Rust program into WebAssembly bytecode or native code. 
- -```bash -cd api/functions/image-grayscale/ -cargo build --release --target wasm32-wasi -``` - -Copy the build artifacts to the `api` folder. - -```bash -cp target/wasm32-wasi/release/grayscale.wasm ../../ -``` - -> Vercel runs [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/pre.sh) upon setting up the serverless environment. It installs the WasmEdge runtime, and then compiles each WebAssembly bytecode program into a native `so` library for faster execution. - -The [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/hello.js) file conforms Vercel serverless specification. It loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/hello.js) runs the compiled `grayscale.so` file generated by [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/main/api/pre.sh) for better performance. - -```javascript -const fs = require('fs'); -const { spawn } = require('child_process'); -const path = require('path'); - -module.exports = (req, res) => { - const wasmedge = spawn(path.join(__dirname, 'wasmedge'), [ - path.join(__dirname, 'grayscale.so'), - ]); - - let d = []; - wasmedge.stdout.on('data', (data) => { - d.push(data); - }); - - wasmedge.on('close', (code) => { - let buf = Buffer.concat(d); - - res.setHeader('Content-Type', req.headers['image-type']); - res.send(buf); - }); - - wasmedge.stdin.write(req.body); - wasmedge.stdin.end(''); -}; -``` - -That's it. [Deploy the repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and you now have a Vercel Jamstack app with a high-performance Rust and WebAssembly based serverless backend. 
- -## Example 2: AI inference - -The [second demo](https://vercel-wasm-runtime.vercel.app/) application allows users to upload an image and then invoke a serverless function to classify the main subject on the image. - -It is in [the same GitHub repo](https://github.com/second-state/vercel-wasm-runtime) as the previous example but in the `tensorflow` branch. Note: when you [import this GitHub repo](https://vercel.com/docs/git#deploying-a-git-repository) on the Vercel website, it will create a [preview URL](https://vercel.com/docs/platform/deployments#preview) for each branch. The `tensorflow` branch would have its own deployment URL. - -The backend serverless function for image classification is in the [`api/functions/image-classification`](https://github.com/second-state/vercel-wasm-runtime/tree/tensorflow/api/functions/image-classification) folder in the `tensorflow` branch. The [`src/main.rs`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/functions/image-classification/src/main.rs) file contains the Rust program’s source code. The Rust program reads image data from the `STDIN`, and then outputs the text output to the `STDOUT`. It utilizes the WasmEdge Tensorflow API to run the AI inference. 
- -```rust -pub fn main() { - // Step 1: Load the TFLite model - let model_data: &[u8] = include_bytes!("models/mobilenet_v1_1.0_224/mobilenet_v1_1.0_224_quant.tflite"); - let labels = include_str!("models/mobilenet_v1_1.0_224/labels_mobilenet_quant_v1_224.txt"); - - // Step 2: Read image from STDIN - let mut buf = Vec::new(); - io::stdin().read_to_end(&mut buf).unwrap(); - - // Step 3: Resize the input image for the tensorflow model - let flat_img = wasmedge_tensorflow_interface::load_jpg_image_to_rgb8(&buf, 224, 224); - - // Step 4: AI inference - let mut session = wasmedge_tensorflow_interface::Session::new(&model_data, wasmedge_tensorflow_interface::ModelType::TensorFlowLite); - session.add_input("input", &flat_img, &[1, 224, 224, 3]) - .run(); - let res_vec: Vec = session.get_output("MobilenetV1/Predictions/Reshape_1"); - - // Step 5: Find the food label that responds to the highest probability in res_vec - // ... ... - let mut label_lines = labels.lines(); - for _i in 0..max_index { - label_lines.next(); - } - - // Step 6: Generate the output text - let class_name = label_lines.next().unwrap().to_string(); - if max_value > 50 { - println!("It {} a {} in the picture", confidence.to_string(), class_name, class_name); - } else { - println!("It does not appears to be any food item in the picture."); - } -} -``` - -You can use the `cargo` tool to build the Rust program into WebAssembly bytecode or native code. - -```bash -cd api/functions/image-classification/ -cargo build --release --target wasm32-wasi -``` - -Copy the build artifacts to the `api` folder. - -```bash -cp target/wasm32-wasi/release/classify.wasm ../../ -``` - -Again, the [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/pre.sh) script installs WasmEdge runtime and its Tensorflow dependencies in this application. It also compiles the `classify.wasm` bytecode program to the `classify.so` native shared library at the time of deployment. 
- -The [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/hello.js) file conforms Vercel serverless specification. It loads the WasmEdge runtime, starts the compiled WebAssembly program in WasmEdge, and passes the uploaded image data via `STDIN`. Notice [`api/hello.js`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/hello.js) runs the compiled `classify.so` file generated by [`api/pre.sh`](https://github.com/second-state/vercel-wasm-runtime/blob/tensorflow/api/pre.sh) for better performance. - -```javascript -const fs = require('fs'); -const { spawn } = require('child_process'); -const path = require('path'); - -module.exports = (req, res) => { - const wasmedge = spawn( - path.join(__dirname, 'wasmedge-tensorflow-lite'), - [path.join(__dirname, 'classify.so')], - { env: { LD_LIBRARY_PATH: __dirname } }, - ); - - let d = []; - wasmedge.stdout.on('data', (data) => { - d.push(data); - }); - - wasmedge.on('close', (code) => { - res.setHeader('Content-Type', `text/plain`); - res.send(d.join('')); - }); - - wasmedge.stdin.write(req.body); - wasmedge.stdin.end(''); -}; -``` - -You can now [deploy your forked repo to Vercel](https://vercel.com/docs/git#deploying-a-git-repository) and have a web app for subject classification. - -Next, it's your turn to use [the vercel-wasm-runtime repo](https://github.com/second-state/vercel-wasm-runtime) as a template to develop your own Rust serverless functions in Vercel. Looking forward to your great work. diff --git a/docs/embed/use-case/web-app.md b/docs/embed/use-case/web-app.md deleted file mode 100644 index ba737d89d..000000000 --- a/docs/embed/use-case/web-app.md +++ /dev/null @@ -1,101 +0,0 @@ ---- -sidebar_position: 9 ---- - -# A simple WebAssembly example - -In this article, I will show you how to build a container image for a WebAssembly application. It can then be started and managed by Kubernetes ecosystem tools, such as CRI-O, Docker, crun, and Kubernetes. 
- -## Prerequisites - -> If you simply want a wasm bytecode file to test as a container image, you can skip the building process and just [download the wasm file here](https://github.com/second-state/wasm-learning/blob/master/cli/wasi/wasi_example_main.wasm). - -If you have not done so already, follow these simple instructions to [install Rust](https://www.rust-lang.org/tools/install). - -## Download example code - -```bash -git clone https://github.com/second-state/wasm-learning -cd wasm-learning/cli/wasi -``` - -## Build the WASM bytecode - -```bash -rustup target add wasm32-wasi -cargo build --target wasm32-wasi --release -``` - -The wasm bytecode application is in the `target/wasm32-wasi/release/wasi_example_main.wasm` file. You can now publish and use it as a container image. - -## Apply executable permission on the Wasm bytecode - -```bash -chmod +x target/wasm32-wasi/release/wasi_example_main.wasm -``` - -## Create Dockerfile - -Create a file called `Dockerfile` in the `target/wasm32-wasi/release/` folder with the following content: - -```dockerfile -FROM scratch -ADD wasi_example_main.wasm / -CMD ["/wasi_example_main.wasm"] -``` - -## Create container image with annotations - -> Please note that adding self-defined annotation is still a new feature in buildah. - -The `crun` container runtime can start the above WebAssembly-based container image. But it requires the `module.wasm.image/variant=compat-smart` annotation on the container image to indicate that it is a WebAssembly application without a guest OS. You can find the details in [Official crun repo](https://github.com/containers/crun/blob/main/docs/wasm-wasi-example.md). - -To add `module.wasm.image/variant=compat-smart` annotation in the container image, you will need the latest [buildah](https://buildah.io/). Currently, Docker does not support this feature. 
Please follow [the install instructions of buildah](https://github.com/containers/buildah/blob/main/install.md) to build the latest buildah binary. - -### Build and install the latest buildah on Ubuntu - -On Ubuntu zesty and xenial, use these commands to prepare for buildah. - -```bash -sudo apt-get -y install software-properties-common - -export OS="xUbuntu_20.04" -sudo bash -c "echo \"deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/ /\" > /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list" -sudo bash -c "curl -L https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/Release.key | apt-key add -" - -sudo add-apt-repository -y ppa:alexlarsson/flatpak -sudo apt-get -y -qq update -sudo apt-get -y install bats git libapparmor-dev libdevmapper-dev libglib2.0-dev libgpgme-dev libseccomp-dev libselinux1-dev skopeo-containers go-md2man containers-common -sudo apt-get -y install golang-1.16 make -``` - -Then, follow these steps to build and install buildah on Ubuntu. - -```bash -mkdir -p ~/buildah -cd ~/buildah -export GOPATH=`pwd` -git clone https://github.com/containers/buildah ./src/github.com/containers/buildah -cd ./src/github.com/containers/buildah -PATH=/usr/lib/go-1.16/bin:$PATH make -cp bin/buildah /usr/bin/buildah -buildah --help -``` - -### Create and publish a container image with buildah - -In the `target/wasm32-wasi/release/` folder, do the following. - -```bash -$ sudo buildah build --annotation "module.wasm.image/variant=compat-smart" -t wasm-wasi-example . -# make sure docker is install and running -# systemctl status docker -# to make sure regular user can use docker -# sudo usermod -aG docker $USER -# newgrp docker - -# You may need to use docker login to create the `~/.docker/config.json` for auth. -$ sudo buildah push --authfile ~/.docker/config.json wasm-wasi-example docker://docker.io/wasmedge/example-wasi:latest -``` - -That's it! 
Now you can try to run it in [CRI-O](../../develop/deploy/cri-runtime/crio-crun.md) or [Kubernetes](../../develop/deploy/kubernetes/kubernetes-cri-o.md)! diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/web-app.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/web-app.md deleted file mode 100644 index ba737d89d..000000000 --- a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/web-app.md +++ /dev/null @@ -1,101 +0,0 @@ ---- -sidebar_position: 9 ---- - -# A simple WebAssembly example - -In this article, I will show you how to build a container image for a WebAssembly application. It can then be started and managed by Kubernetes ecosystem tools, such as CRI-O, Docker, crun, and Kubernetes. - -## Prerequisites - -> If you simply want a wasm bytecode file to test as a container image, you can skip the building process and just [download the wasm file here](https://github.com/second-state/wasm-learning/blob/master/cli/wasi/wasi_example_main.wasm). - -If you have not done so already, follow these simple instructions to [install Rust](https://www.rust-lang.org/tools/install). - -## Download example code - -```bash -git clone https://github.com/second-state/wasm-learning -cd wasm-learning/cli/wasi -``` - -## Build the WASM bytecode - -```bash -rustup target add wasm32-wasi -cargo build --target wasm32-wasi --release -``` - -The wasm bytecode application is in the `target/wasm32-wasi/release/wasi_example_main.wasm` file. You can now publish and use it as a container image. 
- -## Apply executable permission on the Wasm bytecode - -```bash -chmod +x target/wasm32-wasi/release/wasi_example_main.wasm -``` - -## Create Dockerfile - -Create a file called `Dockerfile` in the `target/wasm32-wasi/release/` folder with the following content: - -```dockerfile -FROM scratch -ADD wasi_example_main.wasm / -CMD ["/wasi_example_main.wasm"] -``` - -## Create container image with annotations - -> Please note that adding self-defined annotation is still a new feature in buildah. - -The `crun` container runtime can start the above WebAssembly-based container image. But it requires the `module.wasm.image/variant=compat-smart` annotation on the container image to indicate that it is a WebAssembly application without a guest OS. You can find the details in [Official crun repo](https://github.com/containers/crun/blob/main/docs/wasm-wasi-example.md). - -To add `module.wasm.image/variant=compat-smart` annotation in the container image, you will need the latest [buildah](https://buildah.io/). Currently, Docker does not support this feature. Please follow [the install instructions of buildah](https://github.com/containers/buildah/blob/main/install.md) to build the latest buildah binary. - -### Build and install the latest buildah on Ubuntu - -On Ubuntu zesty and xenial, use these commands to prepare for buildah. 
- -```bash -sudo apt-get -y install software-properties-common - -export OS="xUbuntu_20.04" -sudo bash -c "echo \"deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/ /\" > /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list" -sudo bash -c "curl -L https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/Release.key | apt-key add -" - -sudo add-apt-repository -y ppa:alexlarsson/flatpak -sudo apt-get -y -qq update -sudo apt-get -y install bats git libapparmor-dev libdevmapper-dev libglib2.0-dev libgpgme-dev libseccomp-dev libselinux1-dev skopeo-containers go-md2man containers-common -sudo apt-get -y install golang-1.16 make -``` - -Then, follow these steps to build and install buildah on Ubuntu. - -```bash -mkdir -p ~/buildah -cd ~/buildah -export GOPATH=`pwd` -git clone https://github.com/containers/buildah ./src/github.com/containers/buildah -cd ./src/github.com/containers/buildah -PATH=/usr/lib/go-1.16/bin:$PATH make -cp bin/buildah /usr/bin/buildah -buildah --help -``` - -### Create and publish a container image with buildah - -In the `target/wasm32-wasi/release/` folder, do the following. - -```bash -$ sudo buildah build --annotation "module.wasm.image/variant=compat-smart" -t wasm-wasi-example . -# make sure docker is install and running -# systemctl status docker -# to make sure regular user can use docker -# sudo usermod -aG docker $USER -# newgrp docker - -# You may need to use docker login to create the `~/.docker/config.json` for auth. -$ sudo buildah push --authfile ~/.docker/config.json wasm-wasi-example docker://docker.io/wasmedge/example-wasi:latest -``` - -That's it! Now you can try to run it in [CRI-O](../../develop/deploy/cri-runtime/crio-crun.md) or [Kubernetes](../../develop/deploy/kubernetes/kubernetes-cri-o.md)! 
From 3b73941907458aa8a4fa8924a43791fe5d0542bb Mon Sep 17 00:00:00 2001 From: Adithya Krishna Date: Thu, 31 Aug 2023 07:54:09 +0530 Subject: [PATCH 09/11] Removed web-app docs Signed-off-by: Adithya Krishna --- docs/start/usage/use-cases.md | 2 +- docs/start/usage/web-app.md | 101 ------------------ .../current/start/usage/use-cases.md | 2 +- 3 files changed, 2 insertions(+), 103 deletions(-) delete mode 100644 docs/start/usage/web-app.md diff --git a/docs/start/usage/use-cases.md b/docs/start/usage/use-cases.md index 3cd2b5683..9e7a43f1a 100644 --- a/docs/start/usage/use-cases.md +++ b/docs/start/usage/use-cases.md @@ -8,7 +8,7 @@ Featuring AOT compiler optimization, WasmEdge is one of the fastest WebAssembly - WasmEdge provides a lightweight, secure and high-performance runtime for microservices. It is fully compatible with application service frameworks such as Dapr, and service orchestrators like Kubernetes. WasmEdge microservices can run on edge servers, and have access to distributed cache, to support both stateless and stateful business logic functions for modern web apps. Also related: Serverless function-as-a-service in public clouds. -- [Serverless SaaS (Software-as-a-Service)](./serverless/serverless-platforms) functions enables users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. +- [Serverless SaaS (Software-as-a-Service)](./serverless/) functions enables users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. 
- [Smart device apps](./wasm-smart-devices.md) could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e, the GPU for AI inference). The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, and developer productivity. WasmEdge runs on Android, OpenHarmony, and seL4 RTOS devices. diff --git a/docs/start/usage/web-app.md b/docs/start/usage/web-app.md deleted file mode 100644 index ba737d89d..000000000 --- a/docs/start/usage/web-app.md +++ /dev/null @@ -1,101 +0,0 @@ ---- -sidebar_position: 9 ---- - -# A simple WebAssembly example - -In this article, I will show you how to build a container image for a WebAssembly application. It can then be started and managed by Kubernetes ecosystem tools, such as CRI-O, Docker, crun, and Kubernetes. - -## Prerequisites - -> If you simply want a wasm bytecode file to test as a container image, you can skip the building process and just [download the wasm file here](https://github.com/second-state/wasm-learning/blob/master/cli/wasi/wasi_example_main.wasm). - -If you have not done so already, follow these simple instructions to [install Rust](https://www.rust-lang.org/tools/install). - -## Download example code - -```bash -git clone https://github.com/second-state/wasm-learning -cd wasm-learning/cli/wasi -``` - -## Build the WASM bytecode - -```bash -rustup target add wasm32-wasi -cargo build --target wasm32-wasi --release -``` - -The wasm bytecode application is in the `target/wasm32-wasi/release/wasi_example_main.wasm` file. You can now publish and use it as a container image. 
- -## Apply executable permission on the Wasm bytecode - -```bash -chmod +x target/wasm32-wasi/release/wasi_example_main.wasm -``` - -## Create Dockerfile - -Create a file called `Dockerfile` in the `target/wasm32-wasi/release/` folder with the following content: - -```dockerfile -FROM scratch -ADD wasi_example_main.wasm / -CMD ["/wasi_example_main.wasm"] -``` - -## Create container image with annotations - -> Please note that adding self-defined annotation is still a new feature in buildah. - -The `crun` container runtime can start the above WebAssembly-based container image. But it requires the `module.wasm.image/variant=compat-smart` annotation on the container image to indicate that it is a WebAssembly application without a guest OS. You can find the details in [Official crun repo](https://github.com/containers/crun/blob/main/docs/wasm-wasi-example.md). - -To add `module.wasm.image/variant=compat-smart` annotation in the container image, you will need the latest [buildah](https://buildah.io/). Currently, Docker does not support this feature. Please follow [the install instructions of buildah](https://github.com/containers/buildah/blob/main/install.md) to build the latest buildah binary. - -### Build and install the latest buildah on Ubuntu - -On Ubuntu zesty and xenial, use these commands to prepare for buildah. 
- -```bash -sudo apt-get -y install software-properties-common - -export OS="xUbuntu_20.04" -sudo bash -c "echo \"deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/ /\" > /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list" -sudo bash -c "curl -L https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/Release.key | apt-key add -" - -sudo add-apt-repository -y ppa:alexlarsson/flatpak -sudo apt-get -y -qq update -sudo apt-get -y install bats git libapparmor-dev libdevmapper-dev libglib2.0-dev libgpgme-dev libseccomp-dev libselinux1-dev skopeo-containers go-md2man containers-common -sudo apt-get -y install golang-1.16 make -``` - -Then, follow these steps to build and install buildah on Ubuntu. - -```bash -mkdir -p ~/buildah -cd ~/buildah -export GOPATH=`pwd` -git clone https://github.com/containers/buildah ./src/github.com/containers/buildah -cd ./src/github.com/containers/buildah -PATH=/usr/lib/go-1.16/bin:$PATH make -cp bin/buildah /usr/bin/buildah -buildah --help -``` - -### Create and publish a container image with buildah - -In the `target/wasm32-wasi/release/` folder, do the following. - -```bash -$ sudo buildah build --annotation "module.wasm.image/variant=compat-smart" -t wasm-wasi-example . -# make sure docker is install and running -# systemctl status docker -# to make sure regular user can use docker -# sudo usermod -aG docker $USER -# newgrp docker - -# You may need to use docker login to create the `~/.docker/config.json` for auth. -$ sudo buildah push --authfile ~/.docker/config.json wasm-wasi-example docker://docker.io/wasmedge/example-wasi:latest -``` - -That's it! Now you can try to run it in [CRI-O](../../develop/deploy/cri-runtime/crio-crun.md) or [Kubernetes](../../develop/deploy/kubernetes/kubernetes-cri-o.md)! 
diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md index 3cd2b5683..9e7a43f1a 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md @@ -8,7 +8,7 @@ Featuring AOT compiler optimization, WasmEdge is one of the fastest WebAssembly - WasmEdge provides a lightweight, secure and high-performance runtime for microservices. It is fully compatible with application service frameworks such as Dapr, and service orchestrators like Kubernetes. WasmEdge microservices can run on edge servers, and have access to distributed cache, to support both stateless and stateful business logic functions for modern web apps. Also related: Serverless function-as-a-service in public clouds. -- [Serverless SaaS (Software-as-a-Service)](./serverless/serverless-platforms) functions enables users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. +- [Serverless SaaS (Software-as-a-Service)](./serverless/) functions enables users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. - [Smart device apps](./wasm-smart-devices.md) could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e, the GPU for AI inference). 
The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, and developer productivity. WasmEdge runs on Android, OpenHarmony, and seL4 RTOS devices. From c41e1b0dd3c3ed7bfa096ec73edff5993b0b19df Mon Sep 17 00:00:00 2001 From: Adithya Krishna Date: Thu, 31 Aug 2023 08:02:04 +0530 Subject: [PATCH 10/11] Fixed Broken Link Signed-off-by: Adithya Krishna --- docs/start/usage/use-cases.md | 4 +++- .../current/start/usage/use-cases.md | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/start/usage/use-cases.md b/docs/start/usage/use-cases.md index 9e7a43f1a..9218603d9 100644 --- a/docs/start/usage/use-cases.md +++ b/docs/start/usage/use-cases.md @@ -2,13 +2,15 @@ sidebar_position: 1 --- +# Use Cases + Featuring AOT compiler optimization, WasmEdge is one of the fastest WebAssembly runtimes on the market today. Therefore WasmEdge is widely used in edge computing, automotive, Jamstack, serverless, SaaS, service mesh, and even blockchain applications. - Modern web apps feature rich UIs that are rendered in the browser and/or on the edge cloud. WasmEdge works with popular web UI frameworks, such as React, Vue, Yew, and Percy, to support isomorphic [server-side rendering (SSR)](../../embed/use-case/ssr-modern-ui.md) functions on edge servers. It could also support server-side rendering of Unity3D animations and AI-generated interactive videos for web applications on the edge cloud. - WasmEdge provides a lightweight, secure and high-performance runtime for microservices. It is fully compatible with application service frameworks such as Dapr, and service orchestrators like Kubernetes. WasmEdge microservices can run on edge servers, and have access to distributed cache, to support both stateless and stateful business logic functions for modern web apps. Also related: Serverless function-as-a-service in public clouds. 
-- [Serverless SaaS (Software-as-a-Service)](./serverless/) functions enables users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. +- [Serverless SaaS (Software-as-a-Service)](/category/serverless-platforms) functions enable users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. - [Smart device apps](./wasm-smart-devices.md) could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e, the GPU for AI inference). The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, and developer productivity. WasmEdge runs on Android, OpenHarmony, and seL4 RTOS devices. diff --git a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md index 9e7a43f1a..9218603d9 100644 --- a/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md +++ b/i18n/zh/docusaurus-plugin-content-docs/current/start/usage/use-cases.md @@ -2,13 +2,15 @@ sidebar_position: 1 --- +# Use Cases + Featuring AOT compiler optimization, WasmEdge is one of the fastest WebAssembly runtimes on the market today. Therefore WasmEdge is widely used in edge computing, automotive, Jamstack, serverless, SaaS, service mesh, and even blockchain applications. - Modern web apps feature rich UIs that are rendered in the browser and/or on the edge cloud.
WasmEdge works with popular web UI frameworks, such as React, Vue, Yew, and Percy, to support isomorphic [server-side rendering (SSR)](../../embed/use-case/ssr-modern-ui.md) functions on edge servers. It could also support server-side rendering of Unity3D animations and AI-generated interactive videos for web applications on the edge cloud. - WasmEdge provides a lightweight, secure and high-performance runtime for microservices. It is fully compatible with application service frameworks such as Dapr, and service orchestrators like Kubernetes. WasmEdge microservices can run on edge servers, and have access to distributed cache, to support both stateless and stateful business logic functions for modern web apps. Also related: Serverless function-as-a-service in public clouds. -- [Serverless SaaS (Software-as-a-Service)](./serverless/) functions enables users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. +- [Serverless SaaS (Software-as-a-Service)](/category/serverless-platforms) functions enable users to extend and customize their SaaS experience without operating their own API callback servers. The serverless functions can be embedded into the SaaS or reside on edge servers next to the SaaS servers. Developers simply upload functions to respond to SaaS events or to connect SaaS APIs. - [Smart device apps](./wasm-smart-devices.md) could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e, the GPU for AI inference). The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, and developer productivity.
WasmEdge runs on Android, OpenHarmony, and seL4 RTOS devices. From 12a9633dc9755714a892e7d3ceff5132fc17b3f4 Mon Sep 17 00:00:00 2001 From: Adithya Krishna Date: Sat, 2 Sep 2023 13:12:36 +0530 Subject: [PATCH 11/11] Updated Docs Signed-off-by: Adithya Krishna --- docs/embed/use-case/wasm-smart-devices.md | 14 -------------- 1 file changed, 14 deletions(-) delete mode 100644 docs/embed/use-case/wasm-smart-devices.md diff --git a/docs/embed/use-case/wasm-smart-devices.md b/docs/embed/use-case/wasm-smart-devices.md deleted file mode 100644 index 17cd9ad77..000000000 --- a/docs/embed/use-case/wasm-smart-devices.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -sidebar_position: 4 ---- - -# WasmEdge On Smart Devices - -Smart device apps could embed WasmEdge as a middleware runtime to render interactive content on the UI, connect to native device drivers, and access specialized hardware features (i.e., the GPU for AI inference). The benefits of the WasmEdge runtime over native-compiled machine code include security, safety, portability, manageability, OTA upgradability, and developer productivity. WasmEdge runs on the following device OSes. - -- [Android](/category/build-and-run-wasmedge-on-android) -- [OpenHarmony](../../contribute/source/os/openharmony.md) -- [Raspberry Pi](../../contribute/source/os/raspberrypi.md) -- [The seL4 RTOS](../../contribute/source/os/sel4.md) - -With WasmEdge on both the device and the edge server, we can support [isomorphic Server-Side Rendering (SSR)](../../develop/rust/ssr.md) and [microservices](../../start/build-and-run/docker_wasm.md#deploy-the-microservice-example) for rich-client mobile applications that are both portable and upgradeable.