Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/actions/setup/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@ runs:
run: |
HASH_FILE="/tmp/.yarn-lock-hash"
CURRENT_HASH=$(shasum yarn.lock | cut -d' ' -f1)
if [ -f "$HASH_FILE" ] && [ "$(cat "$HASH_FILE")" = "$CURRENT_HASH" ] && [ -d "node_modules" ]; then
echo "yarn.lock unchanged and node_modules exists — skipping install"
if [ -f "$HASH_FILE" ] && [ "$(cat "$HASH_FILE")" = "$CURRENT_HASH" ] && [ -d "node_modules" ] && [ -d "apps/example/node_modules" ] && [ -d "packages/webgpu/node_modules" ]; then
echo "yarn.lock unchanged and workspace node_modules present, skipping install"
else
yarn install --immutable
echo "$CURRENT_HASH" > "$HASH_FILE"
Expand Down
14 changes: 1 addition & 13 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -128,8 +128,6 @@ export function HelloTriangle() {
passEncoder.end();

device.queue.submit([commandEncoder.finish()]);

context.present();
};
helloTriangle();
}, [ref]);
Expand Down Expand Up @@ -174,16 +172,7 @@ ctx.canvas.height = ctx.canvas.clientHeight * PixelRatio.get();

### Frame Scheduling

In React Native, we want to keep frame presentation as a manual operation as we plan to provide more advanced rendering options that are React Native specific.
This means that when you are ready to present a frame, you need to call `present` on the context.

```tsx
// draw
// submit to the queue
device.queue.submit([commandEncoder.finish()]);
// This method is React Native only
context.present();
```
Frame presentation is automatic, matching the behavior of `GPUCanvasContext` on the Web. A native display link (CADisplayLink on iOS, Choreographer on Android) ticks once per vsync and presents any surface whose texture was acquired during a previous vsync interval, which gives your render code the full frame between two vsyncs to encode and submit.

### Canvas Transparency

Expand Down Expand Up @@ -244,7 +233,6 @@ const renderFrame = (device: GPUDevice, context: GPUCanvasContext) => {
const commandEncoder = device.createCommandEncoder();
// ... render ...
device.queue.submit([commandEncoder.finish()]);
context.present();
};

// Initialize WebGPU on main thread, then run on UI thread
Expand Down
7 changes: 7 additions & 0 deletions apps/example/src/App.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,8 @@ import { ComputeToys } from "./ComputeToys";
import { Reanimated } from "./Reanimated";
import { AsyncStarvation } from "./Diagnostics/AsyncStarvation";
import { DeviceLostHang } from "./Diagnostics/DeviceLostHang";
import { PresentRace } from "./Diagnostics/PresentRace";
import { MultiCanvasSubmit } from "./Diagnostics/MultiCanvasSubmit";
import { StorageBufferVertices } from "./StorageBufferVertices";

// The two lines below are needed by three.js
Expand Down Expand Up @@ -93,6 +95,11 @@ function App() {
<Stack.Screen name="Reanimated" component={Reanimated} />
<Stack.Screen name="AsyncStarvation" component={AsyncStarvation} />
<Stack.Screen name="DeviceLostHang" component={DeviceLostHang} />
<Stack.Screen name="PresentRace" component={PresentRace} />
<Stack.Screen
name="MultiCanvasSubmit"
component={MultiCanvasSubmit}
/>
<Stack.Screen
name="StorageBufferVertices"
component={StorageBufferVertices}
Expand Down
2 changes: 0 additions & 2 deletions apps/example/src/CanvasAPI/CanvasAPI.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,6 @@ export const CanvasAPI = () => {
passEncoder.end();

device.queue.submit([commandEncoder.finish()]);

context.present();
})()
}
title="check surface"
Expand Down
7 changes: 3 additions & 4 deletions apps/example/src/ComputeToys/engine/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
*/

import { Mutex } from "async-mutex";
import type { CanvasRef, RNCanvasContext } from "react-native-wgpu";
import type { CanvasRef } from "react-native-wgpu";

import { Bindings } from "./bind";
import { Blitter, ColorSpace } from "./blit";
Expand Down Expand Up @@ -37,7 +37,7 @@ export class ComputeEngine {

private device: GPUDevice;

private surface: RNCanvasContext | null = null;
private surface: GPUCanvasContext | null = null;
private screenWidth = -1;
private screenHeight = -1;

Expand Down Expand Up @@ -110,7 +110,7 @@ export class ComputeEngine {
}

public setSurface(canvas: CanvasRef) {
const context = canvas.getContext("webgpu") as RNCanvasContext;
const context = canvas.getContext("webgpu");
if (!context) {
throw new Error("WebGPU not supported");
}
Expand Down Expand Up @@ -398,7 +398,6 @@ fn passSampleLevelBilinearRepeat(pass_index: int, uv: float2, lod: float) -> flo

// Submit command buffer
this.device.queue.submit([encoder.finish()]);
this.surface!.present();

// Update frame counter
this.bindings!.time.host.frame += 1;
Expand Down
158 changes: 158 additions & 0 deletions apps/example/src/Diagnostics/MultiCanvasSubmit.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
import React, { useEffect, useRef } from "react";
import { StyleSheet, Text, View } from "react-native";
import type { CanvasRef } from "react-native-wgpu";
import { Canvas } from "react-native-wgpu";

type Mode = "combined" | "split";

/**
 * Drives two already-created canvas contexts from a single render loop.
 *
 * Both contexts are configured for `device`/`format`, then a
 * requestAnimationFrame loop clears each canvas to complementary animated
 * colors. Depending on `mode`, the two clears are recorded either into one
 * command buffer ("combined") or into two command buffers submitted in a
 * single `queue.submit` call ("split") — exercising both presentable-surface
 * tracking paths described in the on-screen labels.
 *
 * @param device     Device used to configure, encode, and submit.
 * @param contextA   First canvas context (normal hue).
 * @param contextB   Second canvas context (inverted hue).
 * @param format     Texture format both contexts are configured with.
 * @param mode       "combined" (one encoder) or "split" (two encoders).
 * @param shouldStop Polled each frame; returning true ends the loop.
 */
const runPair = (
  device: GPUDevice,
  contextA: GPUCanvasContext,
  contextB: GPUCanvasContext,
  format: GPUTextureFormat,
  mode: Mode,
  shouldStop: () => boolean,
) => {
  contextA.configure({ device, format, alphaMode: "premultiplied" });
  contextB.configure({ device, format, alphaMode: "premultiplied" });

  // Hoisted out of the frame callback so a fresh closure is not allocated
  // on every animation frame (loop-invariant work).
  const drawClear = (
    encoder: GPUCommandEncoder,
    view: GPUTextureView,
    color: GPUColor,
  ) => {
    const pass = encoder.beginRenderPass({
      colorAttachments: [
        {
          view,
          clearValue: color,
          loadOp: "clear",
          storeOp: "store",
        },
      ],
    });
    pass.end();
  };

  const frame = () => {
    if (shouldStop()) {
      return;
    }

    const textureA = contextA.getCurrentTexture();
    const textureB = contextB.getCurrentTexture();

    // Smoothly animated RGB so a frozen canvas is visually obvious.
    const time = Date.now() / 1000;
    const r = (Math.sin(time * 2.0) + 1) / 2;
    const g = (Math.sin(time * 1.5 + Math.PI / 3) + 1) / 2;
    const b = (Math.sin(time * 1.0 + Math.PI / 2) + 1) / 2;

    if (mode === "combined") {
      // One encoder, two passes targeting two different surfaces, one
      // command buffer, one submit. Tracks that beginRenderPass accumulates
      // every color-attachment surface into the encoder's presentable set.
      const encoder = device.createCommandEncoder();
      drawClear(encoder, textureA.createView(), [r, g, b, 1]);
      drawClear(encoder, textureB.createView(), [1 - r, 1 - g, 1 - b, 1]);
      device.queue.submit([encoder.finish()]);
    } else {
      // Two encoders, two command buffers, one submit. Tracks that
      // queue.submit aggregates presentable surfaces across every command
      // buffer in the array.
      const encoderA = device.createCommandEncoder();
      drawClear(encoderA, textureA.createView(), [r, g, b, 1]);
      const encoderB = device.createCommandEncoder();
      drawClear(encoderB, textureB.createView(), [1 - r, 1 - g, 1 - b, 1]);
      device.queue.submit([encoderA.finish(), encoderB.finish()]);
    }

    requestAnimationFrame(frame);
  };

  frame();
};

/**
 * One labeled row of two side-by-side canvases driven by `runPair`.
 * Acquires an adapter/device, grabs both webgpu contexts, and starts the
 * render loop; unmounting flips `stopped` so the loop exits on its next tick.
 */
const Pair = ({ mode, label }: { mode: Mode; label: string }) => {
  const refA = useRef<CanvasRef>(null);
  const refB = useRef<CanvasRef>(null);

  useEffect(() => {
    let stopped = false;

    const start = async () => {
      const adapter = await navigator.gpu.requestAdapter();
      if (!adapter) {
        return;
      }
      const device = await adapter.requestDevice();
      const contextA = refA.current?.getContext("webgpu");
      const contextB = refB.current?.getContext("webgpu");
      if (!contextA || !contextB) {
        return;
      }
      const format = navigator.gpu.getPreferredCanvasFormat();
      runPair(device, contextA, contextB, format, mode, () => stopped);
    };

    // Fire-and-forget: any setup failure simply leaves the canvases blank.
    void start();

    return () => {
      stopped = true;
    };
  }, [mode]);

  return (
    <View style={styles.pair}>
      <Text style={styles.label}>{label}</Text>
      <View style={styles.row}>
        <Canvas ref={refA} style={styles.canvas} />
        <Canvas ref={refB} style={styles.canvas} />
      </View>
    </View>
  );
};

/**
 * Diagnostic screen: two rows, each presenting two canvases from a single
 * `queue.submit`, covering both the one-command-buffer and
 * two-command-buffer presentable-surface tracking paths.
 */
export const MultiCanvasSubmit = () => (
  <View style={styles.container}>
    <Text style={styles.intro}>
      Each row drives two canvases that render inverted hues from a single
      submit. If the presentable-surface tracking is broken, one of the two
      canvases will stop updating (no display-link tick will present it).
    </Text>
    <Pair
      mode="combined"
      label="One encoder, two passes, one command buffer"
    />
    <Pair
      mode="split"
      label="Two encoders, two command buffers, one submit"
    />
  </View>
);

// Dark-themed layout: a full-screen column with an intro paragraph followed
// by two equally-sized labeled rows of canvases.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: "#111",
    padding: 12,
  },
  intro: {
    color: "#f5f5f5",
    fontSize: 13,
    lineHeight: 18,
    marginBottom: 12,
  },
  // Each Pair row shares the remaining vertical space equally (flex: 1).
  pair: {
    flex: 1,
    marginBottom: 12,
  },
  label: {
    color: "#f5f5f5",
    fontSize: 13,
    marginBottom: 6,
  },
  row: {
    flex: 1,
    flexDirection: "row",
  },
  canvas: {
    flex: 1,
    marginRight: 6,
  },
});
Loading
Loading