diff --git a/ml.md b/ml.md
index bce656f..2070674 100644
--- a/ml.md
+++ b/ml.md
@@ -10,7 +10,6 @@ Then, the user passes tensor inputs to the graph, computes the
  - Imports:
-    - interface `wasi:nn/errors@0.2.0-rc-2024-06-25`
@@ -90,94 +89,13 @@ containing a single value, use [1] for the tensor dimensions.
-## Import interface wasi:nn/errors@0.2.0-rc-2024-06-25
-
-TODO: create function-specific errors (https://github.com/WebAssembly/wasi-nn/issues/42)
-
-### Types
-
-#### `enum error-code`
-
-##### Enum Cases
-
-- `invalid-argument`
-- `invalid-encoding`
-- `timeout`
-- `runtime-error`
-- `unsupported-operation`
-- `too-large`
-- `not-found`
-- `security`
-- `unknown`
-
-#### `resource error`
-
-### Functions
-
-#### `[constructor]error: func`
-
-##### Params
-
-- `code`: [`error-code`](#error_code)
-- `data`: `string`
-
-##### Return values
-
-- [`error`](#error)
-
-#### `[method]error.code: func`
-
-Return the error code.
-
-##### Params
-
-- `self`: [`error`](#error)
-
-##### Return values
-
-- [`error-code`](#error_code)
-
-#### `[method]error.data: func`
-
-Errors can propagated with backend specific status through a string value.
-
-##### Params
-
-- `self`: [`error`](#error)
-
-##### Return values
-
-- `string`

 ## Import interface wasi:nn/inference@0.2.0-rc-2024-06-25

 An inference "session" is encapsulated by a `graph-execution-context`. This structure binds a
 `graph` to input tensors before `compute`-ing an inference:

 ### Types

-#### `type error`
-[`error`](#error)
-
-#### `type tensor`
-[`tensor`](#tensor)
+#### `type tensor`
+[`tensor`](#tensor)

 #### `type tensor-data`
 [`tensor-data`](#tensor_data)
@@ -197,7 +115,7 @@ e.g., cannot access a hardware feature requested
 ##### Return values

-- result<_, [`error`](#error)>
+- result<_, `string`>

 #### `[method]graph-execution-context.compute: func`

 Compute the inference on the given inputs.

@@ -210,7 +128,7 @@ https://github.com/WebAssembly/wasi-nn/issues/43.
 ##### Return values

-- result<_, [`error`](#error)>
+- result<_, `string`>

 #### `[method]graph-execution-context.get-output: func`

 Extract the outputs after inference.

@@ -221,18 +139,15 @@ https://github.com/WebAssembly/wasi-nn/issues/43.
 ##### Return values

-- result<[`tensor-data`](#tensor_data), [`error`](#error)>
+- result<[`tensor-data`](#tensor_data), `string`>

 ## Import interface wasi:nn/graph@0.2.0-rc-2024-06-25

 A `graph` is a loaded instance of a specific ML model (e.g., MobileNet) for a specific ML
 framework (e.g., TensorFlow):

 ### Types

-#### `type error`
-[`error`](#error)
-
-#### `type tensor`
-[`tensor`](#tensor)
+#### `type tensor`
+[`tensor`](#tensor)

 #### `type graph-execution-context`
 [`graph-execution-context`](#graph_execution_context)

@@ -274,7 +189,7 @@ graph IR in parts (e.g., OpenVINO stores its IR and weights separately).
 ##### Return values

-- result<[`graph-execution-context`](#graph_execution_context), [`error`](#error)>
+- result<[`graph-execution-context`](#graph_execution_context), `string`>

 #### `load: func`

 Load a `graph` from an opaque sequence of bytes to use for inference.

@@ -286,7 +201,7 @@ graph IR in parts (e.g., OpenVINO stores its IR and weights separately).
 ##### Return values

-- result<[`graph`](#graph), [`error`](#error)>
+- result<[`graph`](#graph), `string`>

 #### `load-by-name: func`

 Load a `graph` by name.

@@ -299,5 +214,5 @@ range from simple to complex (e.g., URLs?) and caching mechanisms of various kin
 ##### Return values

-- result<[`graph`](#graph), [`error`](#error)>
+- result<[`graph`](#graph), `string`>
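
For orientation between the two files in this patch: the call sequence the docs above describe is load, init-execution-context, set-input, compute, get-output. Below is a minimal guest-side sketch of that flow, assuming wit-bindgen-generated Rust bindings for the `ml` world; the `bindings::wasi::nn::*` paths, constructors, tensor name, and dimensions are illustrative assumptions, not the bindings' actual API.

```rust
// Illustrative only: the module paths and constructors below are assumed
// wit-bindgen output for the `ml` world, not verbatim generated code.
use bindings::wasi::nn::graph::{load, ExecutionTarget, GraphEncoding};
use bindings::wasi::nn::tensor::{Tensor, TensorType};

/// Run one inference following the documented sequence:
/// load -> init-execution-context -> set-input -> compute -> get-output.
fn classify(model: Vec<u8>, input: Vec<u8>) -> Result<Vec<u8>, String> {
    // With this change every fallible call returns `result<_, string>`, so a
    // plain `String` propagates through `?` instead of an `error` resource.
    let graph = load(&[model], GraphEncoding::Onnx, ExecutionTarget::Cpu)?;
    let ctx = graph.init_execution_context()?;

    // Dimensions and tensor names are placeholders for a real model's inputs.
    let tensor = Tensor::new(&[1, 3, 224, 224], TensorType::Fp32, &input);
    ctx.set_input("input", tensor)?;
    ctx.compute()?;
    ctx.get_output("output")
}
```

The same sequence applies before this patch; only the error half of each `result` changes from the `error` resource to `string`.
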
diff --git a/wit/wasi-nn.wit b/wit/wasi-nn.wit
index 872e8cd..b125e64 100644
--- a/wit/wasi-nn.wit
+++ b/wit/wasi-nn.wit
@@ -12,7 +12,6 @@ world ml {
     import tensor;
     import graph;
     import inference;
-    import errors;
 }

 /// All inputs and outputs to an ML inference are represented as `tensor`s.
@@ -61,13 +60,12 @@ interface tensor {
 /// A `graph` is a loaded instance of a specific ML model (e.g., MobileNet) for a specific ML
 /// framework (e.g., TensorFlow):
 interface graph {
-    use errors.{error};
     use tensor.{tensor};
     use inference.{graph-execution-context};

     /// An execution graph for performing inference (i.e., a model).
     resource graph {
-        init-execution-context: func() -> result<graph-execution-context, error>;
+        init-execution-context: func() -> result<graph-execution-context, string>;
     }

     /// Describes the encoding of the graph. This allows the API to be implemented by various
@@ -96,20 +94,19 @@ interface graph {
     type graph-builder = list<u8>;

     /// Load a `graph` from an opaque sequence of bytes to use for inference.
-    load: func(builder: list<graph-builder>, encoding: graph-encoding, target: execution-target) -> result<graph, error>;
+    load: func(builder: list<graph-builder>, encoding: graph-encoding, target: execution-target) -> result<graph, string>;

     /// Load a `graph` by name.
     ///
     /// How the host expects the names to be passed and how it stores the graphs for retrieval via
     /// this function is **implementation-specific**. This allows hosts to choose name schemes that
     /// range from simple to complex (e.g., URLs?) and caching mechanisms of various kinds.
-    load-by-name: func(name: string) -> result<graph, error>;
+    load-by-name: func(name: string) -> result<graph, string>;
 }

 /// An inference "session" is encapsulated by a `graph-execution-context`. This structure binds a
 /// `graph` to input tensors before `compute`-ing an inference:
 interface inference {
-    use errors.{error};
     use tensor.{tensor, tensor-data};

     /// Bind a `graph` to the input and output tensors for an inference.
@@ -118,51 +115,16 @@ interface inference {
     /// (https://github.com/WebAssembly/wasi-nn/issues/43)
     resource graph-execution-context {
         /// Define the inputs to use for inference.
-        set-input: func(name: string, tensor: tensor) -> result<_, error>;
+        set-input: func(name: string, tensor: tensor) -> result<_, string>;

         /// Compute the inference on the given inputs.
         ///
         /// Note the expected sequence of calls: `set-input`, `compute`, `get-output`. TODO: this
         /// expectation could be removed as a part of
         /// https://github.com/WebAssembly/wasi-nn/issues/43.
-        compute: func() -> result<_, error>;
+        compute: func() -> result<_, string>;

         /// Extract the outputs after inference.
-        get-output: func(name: string) -> result<tensor-data, error>;
-    }
-}
-
-/// TODO: create function-specific errors (https://github.com/WebAssembly/wasi-nn/issues/42)
-interface errors {
-    enum error-code {
-        // Caller module passed an invalid argument.
-        invalid-argument,
-        // Invalid encoding.
-        invalid-encoding,
-        // The operation timed out.
-        timeout,
-        // Runtime Error.
-        runtime-error,
-        // Unsupported operation.
-        unsupported-operation,
-        // Graph is too large.
-        too-large,
-        // Graph not found.
-        not-found,
-        // The operation is insecure or has insufficient privilege to be performed.
-        // e.g., cannot access a hardware feature requested
-        security,
-        // The operation failed for an unspecified reason.
-        unknown
-    }
-
-    resource error {
-        constructor(code: error-code, data: string);
-
-        /// Return the error code.
-        code: func() -> error-code;
-
-        /// Errors can propagated with backend specific status through a string value.
-        data: func() -> string;
+        get-output: func(name: string) -> result<tensor-data, string>;
     }
 }
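
The caller-visible effect of dropping the `errors` interface is the error shape on every fallible call. A before/after sketch follows, using stand-in types rather than real bindings; `NnError` merely mirrors the removed `error` resource (code plus backend-specific data) and nothing here is generated code.

```rust
// Stand-in for the removed `error` resource: an `error-code` plus backend data.
#[derive(Debug)]
enum ErrorCode {
    InvalidArgument,
    Timeout,
    RuntimeError,
    Unknown,
}

struct NnError {
    code: ErrorCode,
    data: String,
}

impl NnError {
    fn code(&self) -> &ErrorCode { &self.code }
    fn data(&self) -> &str { &self.data }
}

// Before this patch: `compute: func() -> result<_, error>` gave callers a
// machine-matchable code alongside the backend-specific message.
fn report_before(result: Result<(), NnError>) {
    if let Err(e) = result {
        eprintln!("inference failed ({:?}): {}", e.code(), e.data());
    }
}

// After this patch: `compute: func() -> result<_, string>` folds both into a
// single opaque message.
fn report_after(result: Result<(), String>) {
    if let Err(msg) = result {
        eprintln!("inference failed: {msg}");
    }
}
```
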