Skip to content

Commit 4e793f8

Browse files
bmehta001 and Copilot committed
Add WinML 2.0 preview samples
Add the WinML 2.0 verification samples and the minimal SDK, packaging, and target-framework support required to run them against the preview WinML stack.

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
1 parent 4ea0fca commit 4e793f8

40 files changed

Lines changed: 2081 additions & 34 deletions

File tree

.pipelines/templates/build-core-steps.yml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ steps:
5858
inputs:
5959
command: restore
6060
projects: '$(nsRoot)/src/FoundryLocalCore/Core/Core.csproj'
61-
restoreArguments: '-r ${{ parameters.flavor }} /p:Platform=${{ parameters.platform }} /p:IncludeWebService=true /p:Configuration=Release /p:NetTargetFramework=net9.0-windows10.0.26100.0 /p:UseWinML=true'
61+
restoreArguments: '-r ${{ parameters.flavor }} /p:Platform=${{ parameters.platform }} /p:IncludeWebService=true /p:Configuration=Release /p:NetTargetFramework=net9.0-windows10.0.18362.0 /p:UseWinML=true'
6262
feedsToUse: config
6363
nugetConfigPath: '$(nsRoot)/nuget.config'
6464

@@ -67,14 +67,14 @@ steps:
6767
inputs:
6868
command: build
6969
projects: '$(nsRoot)/src/FoundryLocalCore/Core/Core.csproj'
70-
arguments: '--no-restore -r ${{ parameters.flavor }} -f net9.0-windows10.0.26100.0 /p:Platform=${{ parameters.platform }} /p:IncludeWebService=true /p:Configuration=Release /p:NetTargetFramework=net9.0-windows10.0.26100.0 /p:UseWinML=true'
70+
arguments: '--no-restore -r ${{ parameters.flavor }} -f net9.0-windows10.0.18362.0 /p:Platform=${{ parameters.platform }} /p:IncludeWebService=true /p:Configuration=Release /p:NetTargetFramework=net9.0-windows10.0.18362.0 /p:UseWinML=true'
7171

7272
- task: DotNetCoreCLI@2
7373
displayName: 'Publish FLC AOT ${{ parameters.flavor }} (WinML)'
7474
inputs:
7575
command: publish
7676
projects: '$(nsRoot)/src/FoundryLocalCore/Core/Core.csproj'
77-
arguments: '--no-restore --no-build -r ${{ parameters.flavor }} -f net9.0-windows10.0.26100.0 /p:Platform=${{ parameters.platform }} /p:Configuration=Release /p:PublishAot=true /p:NetTargetFramework=net9.0-windows10.0.26100.0 /p:UseWinML=true'
77+
arguments: '--no-restore --no-build -r ${{ parameters.flavor }} -f net9.0-windows10.0.18362.0 /p:Platform=${{ parameters.platform }} /p:Configuration=Release /p:PublishAot=true /p:NetTargetFramework=net9.0-windows10.0.18362.0 /p:UseWinML=true'
7878
publishWebProjects: false
7979
zipAfterPublish: false
8080

@@ -84,7 +84,7 @@ steps:
8484
inputs:
8585
command: restore
8686
projects: '$(nsRoot)/test/FoundryLocalCore/Core/FoundryLocalCore.Tests.csproj'
87-
restoreArguments: '-r ${{ parameters.flavor }} /p:Platform=${{ parameters.platform }} /p:IncludeWebService=true /p:Configuration=Release /p:NetTargetFramework=net9.0-windows10.0.26100.0 /p:UseWinML=true'
87+
restoreArguments: '-r ${{ parameters.flavor }} /p:Platform=${{ parameters.platform }} /p:IncludeWebService=true /p:Configuration=Release /p:NetTargetFramework=net9.0-windows10.0.18362.0 /p:UseWinML=true'
8888
feedsToUse: config
8989
nugetConfigPath: '$(nsRoot)/nuget.config'
9090

@@ -93,7 +93,7 @@ steps:
9393
inputs:
9494
command: build
9595
projects: '$(nsRoot)/test/FoundryLocalCore/Core/FoundryLocalCore.Tests.csproj'
96-
arguments: '--no-restore -r ${{ parameters.flavor }} /p:Platform=${{ parameters.platform }} /p:IncludeWebService=true /p:Configuration=Release /p:NetTargetFramework=net9.0-windows10.0.26100.0 /p:UseWinML=true'
96+
arguments: '--no-restore -r ${{ parameters.flavor }} /p:Platform=${{ parameters.platform }} /p:IncludeWebService=true /p:Configuration=Release /p:NetTargetFramework=net9.0-windows10.0.18362.0 /p:UseWinML=true'
9797

9898
- task: DotNetCoreCLI@2
9999
displayName: 'Test FLC ${{ parameters.flavor }} (WinML)'

.pipelines/templates/package-core-steps.yml

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -100,10 +100,14 @@ steps:
100100
if ("${{ parameters.isWinML }}" -eq "True") {
101101
$nuspec = "$nsRoot/src/FoundryLocalCore/Core/WinMLNuget.nuspec"
102102
$id = "Microsoft.AI.Foundry.Local.Core.WinML"
103-
$ortVer = $pg.OnnxRuntimeFoundryVersionForWinML
104-
$genaiVer = $pg.OnnxRuntimeGenAIFoundryVersion
105-
$winAppSdkVer = $pg.WinAppSdkVersion
106-
$props = "id=$id;version=$(flcVersion);commitId=$(Build.SourceVersion);OnnxRuntimeFoundryVersionForWinML=$ortVer;OnnxRuntimeGenAIFoundryVersion=$genaiVer;WinAppSdkVersion=$winAppSdkVer"
103+
$ortVer = [string]$pg.OnnxRuntimeFoundryVersionForWinML
104+
$genaiVer = [string]$pg.OnnxRuntimeGenAIFoundryVersion
105+
$winMLVer = [string]$pg.WinMLVersion
106+
if ([string]::IsNullOrWhiteSpace($ortVer)) { throw "Directory.Packages.props is missing OnnxRuntimeFoundryVersionForWinML." }
107+
if ([string]::IsNullOrWhiteSpace($genaiVer)) { throw "Directory.Packages.props is missing OnnxRuntimeGenAIFoundryVersion." }
108+
if ([string]::IsNullOrWhiteSpace($winMLVer)) { throw "Directory.Packages.props is missing WinMLVersion." }
109+
110+
$props = "id=$id;version=$(flcVersion);commitId=$(Build.SourceVersion);OnnxRuntimeFoundryVersionForWinML=$ortVer;OnnxRuntimeGenAIFoundryVersion=$genaiVer;WinMLVersion=$winMLVer"
107111
} else {
108112
$nuspec = "$nsRoot/src/FoundryLocalCore/Core/NativeNuget.nuspec"
109113
$id = "Microsoft.AI.Foundry.Local.Core"

samples/README.md

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,8 @@ Explore complete working examples that demonstrate how to use Foundry Local —
88

99
| Language | Samples | Description |
1010
|----------|---------|-------------|
11-
| [**C#**](cs/) | 13 | .NET SDK samples including native chat, embeddings, audio transcription, tool calling, model management, web server, and tutorials. Uses WinML on Windows for hardware acceleration. |
12-
| [**JavaScript**](js/) | 13 | Node.js SDK samples including native chat, embeddings, audio transcription, Electron desktop app, Copilot SDK integration, LangChain, tool calling, web server, and tutorials. |
13-
| [**Python**](python/) | 10 | Python samples using the OpenAI-compatible API, including chat, embeddings, audio transcription, LangChain integration, tool calling, web server, and tutorials. |
14-
| [**Rust**](rust/) | 9 | Rust SDK samples including native chat, embeddings, audio transcription, tool calling, web server, and tutorials. |
11+
| [**C#**](cs/) | 14 | .NET SDK samples including native chat, embeddings, audio transcription, tool calling, model management, web server, tutorials, and WinML EP verification. Uses WinML on Windows for hardware acceleration. |
12+
| [**JavaScript**](js/) | 14 | Node.js SDK samples including native chat, embeddings, audio transcription, Electron desktop app, Copilot SDK integration, LangChain, tool calling, web server, tutorials, and WinML EP verification. |
13+
| [**Python**](python/) | 11 | Python samples using the OpenAI-compatible API, including chat, embeddings, audio transcription, LangChain integration, tool calling, web server, tutorials, and WinML EP verification. |
14+
| [**Rust**](rust/) | 10 | Rust SDK samples including native chat, embeddings, audio transcription, tool calling, web server, tutorials, and WinML EP verification. |
15+
| [**C++**](cpp/) | 2 | C++ SDK samples including WinML EP verification and live audio transcription. |
Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
cmake_minimum_required(VERSION 3.20)
2+
3+
project(VerifyWinMLCpp LANGUAGES CXX)
4+
5+
set(BUILD_TESTING OFF CACHE BOOL "Build C++ SDK tests" FORCE)
6+
add_subdirectory("${CMAKE_CURRENT_LIST_DIR}/../../../sdk/cpp" "${CMAKE_CURRENT_BINARY_DIR}/sdk-cpp")
7+
8+
add_executable(VerifyWinML main.cpp)
9+
target_compile_features(VerifyWinML PRIVATE cxx_std_17)
10+
target_link_libraries(VerifyWinML PRIVATE CppSdk)

samples/cpp/verify-winml/README.md

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
# Verify WinML 2.0 Execution Providers (C++)
2+
3+
This sample verifies that WinML 2.0 execution providers are correctly
4+
discovered, downloaded, and registered using the Foundry Local C++ SDK. It then
5+
exercises model variants backed by the registered WinML EPs, finishing with a
6+
streaming chat check.
7+
8+
## Prerequisites
9+
10+
- Windows with a compatible GPU or NPU
11+
- Windows App SDK 2.0 runtime installed
12+
- A Foundry Local WinML native runtime copied next to the sample executable
13+
14+
The C++ SDK loads `Microsoft.AI.Foundry.Local.Core.dll` from the executable
15+
directory. Build or install a WinML-enabled SDK/runtime first, then copy the
16+
WinML native binaries next to `VerifyWinML.exe` before running the sample.
17+
18+
## Build
19+
20+
From this directory:
21+
22+
```powershell
23+
cmake -S . -B out\build -G "Visual Studio 18 2026" -A x64 `
24+
-DCMAKE_TOOLCHAIN_FILE="$env:VCPKG_ROOT\scripts\buildsystems\vcpkg.cmake" `
25+
-DVCPKG_TARGET_TRIPLET=x64-windows-static-md
26+
27+
cmake --build out\build --config Debug --target VerifyWinML
28+
```
29+
30+
## Run
31+
32+
```powershell
33+
.\out\build\Debug\VerifyWinML.exe
34+
```
35+
36+
## What it tests
37+
38+
1. **EP Discovery** - Lists all available execution providers.
39+
2. **EP Download & Registration** - Downloads and registers the available WinML EPs.
40+
3. **Model Catalog** - Lists text model variants backed by registered accelerated EPs.
41+
4. **Streaming Chat** - Runs streaming chat completion on a WinML EP-backed model via the native C++ SDK.

0 commit comments

Comments (0)