Skip to content

Commit cde3c9c

Browse files
authored
Merge pull request #4 from MeshJS/cip-113-contract
Cip 113 contract
2 parents 798ee08 + 176c91a commit cde3c9c

File tree

80 files changed

+9094
-3331
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

80 files changed

+9094
-3331
lines changed

src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,3 +7,4 @@ export * from "./payment-splitter";
77
export * from "./plutus-nft";
88
export * from "./swap";
99
export * from "./vesting";
10+
export * from "./programmable-tokens";

src/programmable-tokens/aiken-workspace/.gitignore renamed to src/programmable-tokens/aiken-workspace-standard/.gitignore

File renamed without changes.

src/programmable-tokens/aiken-workspace-standard/README.md

Lines changed: 313 additions & 0 deletions
Large diffs are not rendered by default.
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
# This file was generated by Aiken
2+
# You typically do not need to edit this file
3+
4+
[[requirements]]
5+
name = "aiken-lang/stdlib"
6+
version = "v3.0.0"
7+
source = "github"
8+
9+
[[requirements]]
10+
name = "aiken-lang/fuzz"
11+
version = "main"
12+
source = "github"
13+
14+
[[packages]]
15+
name = "aiken-lang/stdlib"
16+
version = "v3.0.0"
17+
requirements = []
18+
source = "github"
19+
20+
[[packages]]
21+
name = "aiken-lang/fuzz"
22+
version = "main"
23+
requirements = []
24+
source = "github"
25+
26+
[etags]
27+
"aiken-lang/fuzz@main" = [{ secs_since_epoch = 1775479957, nanos_since_epoch = 627298000 }, "9843473958e51725a9274b487d2d4aac0395ec1a2e30f090724fa737226bc127"]

src/programmable-tokens/aiken-workspace/aiken.toml renamed to src/programmable-tokens/aiken-workspace-standard/aiken.toml

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
name = "iohk/programmable-tokens"
22
version = "0.3.0"
3-
compiler = "v1.1.17"
3+
compiler = "v1.1.21"
44
plutus = "v3"
55
license = "Apache-2.0"
66
description = "Aiken implementation of CIP-0143 programmable tokens (migrated from Plutarch)"
@@ -15,4 +15,9 @@ name = "aiken-lang/stdlib"
1515
version = "v3.0.0"
1616
source = "github"
1717

18+
[[dependencies]]
19+
name = "aiken-lang/fuzz"
20+
version = "main"
21+
source = "github"
22+
1823
[config]
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Build the Aiken project and publish the compiled blueprint (plutus.json)
# into the Java off-chain project's resources so the two stay in sync.
#
# NOTE: the original shebang was /usr/local/bin/bash (a Homebrew/macOS-only
# path); /usr/bin/env bash resolves bash from PATH on any platform.
set -x

aiken build && cp plutus.json ../programmable-tokens-offchain-java/src/main/resources
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
/// Assertion hook for the standard build environment: a no-op that returns
/// its input unchanged. The `with_assertions` environment supplies the
/// variant that actually rejects Ada entries (see `collect`'s pre-condition
/// note: enforced only with `--env with_assertions`).
pub fn assert_no_ada_policy(value: a) -> a {
2+
value
3+
}
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
use aiken/collection/pairs
2+
use cardano/assets.{PolicyId, ada_policy_id}
3+
4+
/// Enforces that the given asset map holds no entry keyed by the Ada policy,
/// returning the map unchanged; aborts the script otherwise. This is the
/// assertion-enabled variant (built with `--env with_assertions`).
5+
pub fn assert_no_ada_policy(value: Pairs<PolicyId, v>) -> Pairs<PolicyId, v> {
6+
// Trace first so a failing `expect` below is easy to attribute in the logs.
trace @"assert_no_ada_policy"
7+
// `expect None` aborts evaluation when any pair uses the Ada policy id.
expect None = pairs.get_first(value, ada_policy_id)
8+
value
9+
}
Lines changed: 156 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,156 @@
1+
use aiken/builtin.{less_than_bytearray}
2+
use aiken/collection/dict
3+
use cardano/assets.{PolicyId, Value}
4+
use cardano/transaction.{Output}
5+
use env
6+
use list
7+
use tokens.{Tokens}
8+
9+
/// A multi-asset map from policy id to its token bundle, in ascending key
/// order (as produced by `dict.to_pairs` in `from_value`).
pub type Assets =
10+
Pairs<PolicyId, Tokens>
11+
12+
/// Element selector used by `collect`: given an element, call the first
/// continuation with an `Output` to fold that output's assets into the
/// running sum, or the second continuation to discard the element.
pub type SumStrategy<i> =
13+
fn(i, fn(Output) -> Assets, fn() -> Assets) -> Assets
14+
/// A convenient helper to project a `Value` into an `Assets` pair list.
/// Usually comes at no cost since the compiler tends to inline it.
pub fn from_value(self: Value) -> Assets {
  dict.to_pairs(assets.to_dict(self))
}
/// Split a dictionary at the given key, passing to the continuation: the
/// entries before the key (in reverse key order), the tokens found at the
/// key (empty when absent), and the entries after it (in key order).
pub fn split_at(
  self: Value,
  at: PolicyId,
  return: fn(Assets, Tokens, Assets) -> result,
) -> result {
  let entries = from_value(self)
  do_split_at(entries, at, [], return)
}
29+
30+
/// Recursive worker for `split_at`. Scans `self` — which relies on the pair
/// list being sorted ascending by policy id — accumulating visited entries
/// into `before` (reversed), and stops at the first key >= `at`.
fn do_split_at(
31+
self: Assets,
32+
at: PolicyId,
33+
before: Assets,
34+
return: fn(Assets, Tokens, Assets) -> result,
35+
) -> result {
36+
when self is {
37+
// Exhausted without finding the key: no tokens at `at`, nothing after.
[] -> return(before, dict.empty, [])
38+
[head, ..tail] -> {
39+
let k = head.1st
40+
if less_than_bytearray(k, at) {
41+
// Skip while keys are smaller (strictly) than searched key
42+
do_split_at(tail, at, [head, ..before], return)
43+
} else if k == at {
44+
// Done searching, get the value and return the tail after
45+
return(before, head.2nd, tail)
46+
} else {
47+
// The head and tail are all after the key; no need to continue searching.
48+
return(before, dict.empty, self)
49+
}
50+
}
51+
}
52+
}
53+
54+
/// Lookup the first asset policy from an output, or fails loudly if the output holds no tokens (beyond Ada).
55+
pub fn peek_first(self: Output) -> PolicyId {
56+
// `tail` drops the leading entry — assumed to be the Ada entry, which
// presumably sorts first in the pair list — and `head` then yields the
// first real token policy.
// NOTE(review): `list` here is the local module (`use list`), not
// aiken/collection/list; this code only typechecks if its head/tail fail
// hard rather than returning Option — confirm against that module.
list.head(list.tail(from_value(self.value))).1st
57+
}
58+
59+
/// A faster version of assets.merge that preserves empty maps. This allows to
60+
/// bypass the null check on value since quantities can only ever increase.
61+
/// Importantly, it is also *necessary* to ensure that policies are retained in
62+
/// the map for later validations even if they result in no outputs (e.g. a burn
63+
/// fully compensates a spend).
64+
///
65+
/// It also focuses on assets and completely ignores Ada.
66+
///
67+
/// The function recurses over a list of input-like objects, and lets the
68+
/// caller select whether to add an output or not.
69+
///
70+
/// ## Example
71+
///
72+
/// ```aiken
73+
/// collect(
74+
///   self.inputs,
75+
///   [],
76+
///   fn(input, select, discard) {
77+
///     let output = input.output
78+
///     if output.address.payment_credential == needle {
79+
///       select(output)
80+
///     } else {
81+
///       discard()
82+
///     }
83+
///   },
84+
/// )
85+
/// ```
86+
///
87+
/// /!\ PRE-CONDITION /!\
88+
/// The given zero assets are expected not to contain any Ada. This is enforced
89+
/// when assertions are enabled (`--env with_assertions`).
90+
///
91+
/// This is generally true when the initial zero is an empty list or coming
92+
/// from the mint value.
93+
pub fn collect(elems: List<i>, zero: Assets, strategy: SumStrategy<i>) -> Assets {
94+
// `assert_no_ada_policy` is a no-op in the standard env; under
// `--env with_assertions` it aborts if `zero` carries an Ada entry.
do_collect(elems, strategy, env.assert_no_ada_policy(zero))
95+
}
96+
97+
/// Tail-recursive worker for `collect`: walks `elems`, asking `strategy` for
/// each element whether to fold an output's (Ada-stripped) assets into `sum`
/// (first continuation) or to keep `sum` unchanged (second continuation).
fn do_collect(elems: List<i>, strategy: SumStrategy<i>, sum: Assets) -> Assets {
98+
when elems is {
99+
[] -> sum
100+
[head, ..tail] ->
101+
do_collect(
102+
tail,
103+
strategy,
104+
strategy(
105+
head,
106+
// Output is selected
107+
fn(output) {
108+
output.value
109+
|> from_value
110+
// Drop ADA, guaranteed to be present in outputs.
111+
|> list.tail
112+
// NOTE: left-optimised union
113+
// The `union` consumes the left argument into the right argument. So it is
114+
// generally better to provide the smallest argument as left value. The `sum`
115+
// will generally grow as large as outputs and in many scenarios will not be much
116+
// larger. However, it is very often smaller initially (often empty). Hence why
117+
// we force it as first argument here.
118+
|> union(sum, _)
119+
},
120+
// Output is discarded
121+
fn() { sum },
122+
),
123+
)
124+
}
125+
}
/// Merge two Assets by summing token quantities.
/// Used to combine validated input prog value with validated mint prog value.
pub fn union(left: Assets, right: Assets) -> Assets {
  when left is {
    [] -> right
    // NOTE: Preserving null assets
    // It is primordial here not to discard assets even if they result in an
    // empty dict. The 'left' assets may carry negative quantities coming from
    // burns; when a burn is fully covered by a spend, the key must remain in
    // the map so that the validations tied to that policy still occur as
    // expected.
    [Pair(policy, bundle), ..remaining] ->
      union(remaining, do_insert(right, policy, bundle))
  }
}
/// Insert `(k1, v1)` into `self` — kept sorted ascending by policy id —
/// merging token bundles with `tokens.union` when the key already exists.
fn do_insert(self: Assets, k1: PolicyId, v1: Tokens) -> Assets {
  when self is {
    [] -> [Pair(k1, v1)]
    [Pair(k2, v2), ..rest] ->
      if less_than_bytearray(k1, k2) {
        // New key sorts strictly before the current head: splice it in front.
        [Pair(k1, v1), ..self]
      } else if k1 == k2 {
        // Same policy: merge the two token bundles, keep the rest untouched.
        [Pair(k1, tokens.union(v1, v2)), ..rest]
      } else {
        // Key belongs further down: keep the head and recurse into the rest.
        [Pair(k2, v2), ..do_insert(rest, k1, v1)]
      }
  }
}

0 commit comments

Comments
 (0)