@@ -10,13 +10,13 @@ Under the hood, it wraps the callback in a `TgpuFn`, creates a compute pipeline,
 Since the pipeline is reused, there’s no additional overhead for subsequent calls.
 
 ```ts twoslash
-import tgpu, { prepareDispatch } from 'typegpu';
+import tgpu from 'typegpu';
 import * as d from 'typegpu/data';
 const root = await tgpu.init();
 // ---cut---
 const data = root.createMutable(d.arrayOf(d.u32, 8), [0, 1, 2, 3, 4, 5, 6, 7]);
 
-const doubleUp = prepareDispatch(root, (x) => {
+const doubleUp = root['~unstable'].prepareDispatch((x) => {
   'use gpu';
   data.$[x] *= 2;
 });
@@ -39,7 +39,7 @@ Buffer initialization commonly uses random number generators.
 For that, you can use the [`@typegpu/noise`](TypeGPU/ecosystem/typegpu-noise) library.
 
 ```ts twoslash
-import tgpu, { prepareDispatch } from 'typegpu';
+import tgpu from 'typegpu';
 import * as d from 'typegpu/data';
 // ---cut---
 import { randf } from '@typegpu/noise';
@@ -51,7 +51,7 @@ const waterLevelMutable = root.createMutable(
   d.arrayOf(d.arrayOf(d.f32, 512), 1024),
 );
 
-prepareDispatch(root, (x, y) => {
+root['~unstable'].prepareDispatch((x, y) => {
   'use gpu';
   randf.seed2(d.vec2f(x, y).div(1024));
   waterLevelMutable.$[x][y] = 10 + randf.sample();
@@ -65,7 +65,7 @@ console.log(await waterLevelMutable.read());
 The result of `prepareDispatch` can have bind groups bound using the `with` method.
 
 ```ts twoslash
-import tgpu, { prepareDispatch } from 'typegpu';
+import tgpu from 'typegpu';
 import * as d from 'typegpu/data';
 import * as std from 'typegpu/std';
 const root = await tgpu.init();
@@ -84,7 +84,7 @@ const bindGroup2 = root.createBindGroup(layout, {
   buffer: buffer2,
 });
 
-const test = prepareDispatch(root, (x) => {
+const test = root['~unstable'].prepareDispatch((x) => {
   'use gpu';
   layout.$.buffer[x] *= 2;
 });
@@ -123,13 +123,13 @@ Yes, you read that correctly, TypeGPU implements logging to the console on the G
 Just call `console.log` like you would in plain JavaScript, and open the console to see the results.
 
 ```ts twoslash
-import tgpu, { prepareDispatch } from 'typegpu';
+import tgpu from 'typegpu';
 import * as d from 'typegpu/data';
 
 const root = await tgpu.init();
 // ---cut---
 const callCountMutable = root.createMutable(d.u32, 0);
-const compute = prepareDispatch(root, () => {
+const compute = root['~unstable'].prepareDispatch(() => {
   'use gpu';
   callCountMutable.$ += 1;
   console.log('Call number', callCountMutable.$);
@@ -151,7 +151,7 @@ The buffer is of fixed size, which may limit the total amount of information tha
 If that's an issue, you may specify the size manually when creating the `root` object.
 
 ```ts twoslash
-import tgpu, { prepareDispatch } from 'typegpu';
+import tgpu from 'typegpu';
 import * as d from 'typegpu/data';
 
 const presentationFormat = undefined as any;
0 commit comments