diff --git a/.gitignore b/.gitignore
index bc29d0c..bd44d77 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,3 +6,5 @@ mintlify-docs/
# Dependencies
node_modules/
+
+stash
\ No newline at end of file
diff --git a/compressed-pdas/create-a-program-with-compressed-pdas.mdx b/compressed-pdas/overview.mdx
similarity index 100%
rename from compressed-pdas/create-a-program-with-compressed-pdas.mdx
rename to compressed-pdas/overview.mdx
diff --git a/cspell.json b/cspell.json
index 0a506eb..4ea32a3 100644
--- a/cspell.json
+++ b/cspell.json
@@ -175,7 +175,16 @@
"stablecoins",
"fintechs",
"micropayments",
- "clawback"
+ "clawback",
+ "hackathon",
+ "altbn",
+ "circom",
+ "snarkjs",
+ "Zcash",
+ "zcash",
+ "circomlibjs",
+ "Jotaro",
+ "Yano"
],
"ignorePaths": [
"node_modules",
diff --git a/docs.json b/docs.json
index d3a97c7..050a5c2 100644
--- a/docs.json
+++ b/docs.json
@@ -99,6 +99,33 @@
"quickstart"
]
},
+ {
+ "group": "ZK",
+ "pages": [
+ "zk/overview",
+ "zk/examples"
+ ]
+ },
+ {
+ "group": "Compressed PDAs",
+ "pages": [
+ "compressed-pdas/overview",
+ {
+ "group": "Program Guides",
+ "pages": [
+ "compressed-pdas/guides",
+ "compressed-pdas/guides/how-to-create-compressed-accounts",
+ "compressed-pdas/guides/how-to-update-compressed-accounts",
+ "compressed-pdas/guides/how-to-close-compressed-accounts",
+ "compressed-pdas/guides/how-to-reinitialize-compressed-accounts",
+ "compressed-pdas/guides/how-to-burn-compressed-accounts"
+ ]
+ },
+ "compressed-pdas/program-examples",
+ "client-library/client-guide",
+ "compressed-pdas/solana-attestation-service"
+ ]
+ },
{
"group": "Compressed Tokens",
"pages": [
@@ -144,26 +171,6 @@
}
]
},
- {
- "group": "Compressed PDAs",
- "pages": [
- "compressed-pdas/create-a-program-with-compressed-pdas",
- {
- "group": "Program Guides",
- "pages": [
- "compressed-pdas/guides",
- "compressed-pdas/guides/how-to-create-compressed-accounts",
- "compressed-pdas/guides/how-to-update-compressed-accounts",
- "compressed-pdas/guides/how-to-close-compressed-accounts",
- "compressed-pdas/guides/how-to-reinitialize-compressed-accounts",
- "compressed-pdas/guides/how-to-burn-compressed-accounts"
- ]
- },
- "compressed-pdas/program-examples",
- "client-library/client-guide",
- "compressed-pdas/solana-attestation-service"
- ]
- },
{
"group": "JSON RPC Methods",
"pages": [
@@ -240,6 +247,7 @@
"references/whitepaper",
"references/node-operators",
"references/terminology",
+ "references/migration-v1-to-v2",
"support",
"references/security"
]
diff --git a/home.mdx b/home.mdx
index 5beda94..061f242 100644
--- a/home.mdx
+++ b/home.mdx
@@ -189,7 +189,7 @@ import WelcomePageInstall from "/snippets/setup/welcome-page-install.mdx";
Program and client guides for rent-free PDA accounts.
diff --git a/learn/core-concepts/considerations.mdx b/learn/core-concepts/considerations.mdx
index 94eaaf3..0651fff 100644
--- a/learn/core-concepts/considerations.mdx
+++ b/learn/core-concepts/considerations.mdx
@@ -81,7 +81,7 @@ You're ready to take the next step and start building!
title="Compressed PDAs"
icon="chevron-right"
color="#0066ff"
- href="/compressed-pdas/create-a-program-with-compressed-pdas"
+ href="/compressed-pdas/overview"
horizontal
/>
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
index 7b54323..0ca6b8f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -8,7 +8,7 @@
"name": "docs-v2",
"version": "1.0.0",
"devDependencies": {
- "cspell": "^8.6.0",
+ "cspell": "^8.19.4",
"eslint": "^8.57.0",
"eslint-plugin-react": "^7.34.1",
"eslint-plugin-react-hooks": "^4.6.2",
diff --git a/package.json b/package.json
index 938ade2..2cecceb 100644
--- a/package.json
+++ b/package.json
@@ -12,11 +12,10 @@
"ci": "npm run format:check && npm run lint && npm run spellcheck"
},
"devDependencies": {
- "cspell": "^8.6.0",
+ "cspell": "^8.19.4",
"eslint": "^8.57.0",
"eslint-plugin-react": "^7.34.1",
"eslint-plugin-react-hooks": "^4.6.2",
"prettier": "^3.2.5"
}
}
-
diff --git a/references/migration-v1-to-v2.mdx b/references/migration-v1-to-v2.mdx
index c5955ff..907cb7c 100644
--- a/references/migration-v1-to-v2.mdx
+++ b/references/migration-v1-to-v2.mdx
@@ -1,18 +1,19 @@
---
title: Guide to Migrate Programs from v1 to v2 Merkle trees
description: V2 reduces CU consumption by up to 70%. V1 remains supported for existing deployments.
-sidebarTitle: Update Guide to V2
+sidebarTitle: V2 Migration Guide
---
-## V2 Improvements
+import V1ToV2MigrationPrompt from "/snippets/ai-prompts/v1-to-v2-migration.mdx";
-| | v1 | v2 |
-|---------------------|------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------|
-| Merkle tree type | Concurrent | Batched |
-| State tree depth | 26 (~67M leaves) | 32 (~4B leaves) |
-| Address tree depth | 26 | 40 |
-| Address tree accounts| Separate tree + queue | Single batch tree |
+## V2 Improvements
+| | v1 | v2 |
+|-----------------------|-----------------------|--------------------|
+| Merkle tree type | Concurrent | Batched |
+| State tree depth | 26 (~67M leaves) | 32 (~4B leaves) |
+| Address tree depth | 26 | 40 |
+| Address tree accounts | Separate tree + queue | Single batch tree |
@@ -250,6 +251,11 @@ let new_address_params = instruction_data
.into_new_address_params_assigned_packed(address_seed, Some(0));
```
+
+
+<V1ToV2MigrationPrompt />
+
+
diff --git a/resources/addresses-and-urls.mdx b/resources/addresses-and-urls.mdx
index 36c0e1f..9b316cc 100644
--- a/resources/addresses-and-urls.mdx
+++ b/resources/addresses-and-urls.mdx
@@ -43,17 +43,7 @@ Find all JSON RPC Methods for ZK Compression [here](/api-reference/json-rpc-meth
-
-
-| | Devnet | Mainnet | Public Key |
-|:-|:-:|:-:|:-|
-| State Tree | ✓ | ✓ | **smt2rJAFdyJJupwMKAqTNAJwvjhmiZ4JYGZmbVRw1Ho** |
-| Nullifier Queue | ✓ | ✓ | **nfq2hgS7NYemXsFaFUCe3EMXSDSfnZnAe27jC6aPP1X** |
-| CPI Context | ✓ | ✓ | **cpi2cdhkH5roePvcudTgUL8ppEBfTay1desGh8G8QxK** |
-
-
-
-
+
| | Devnet | Mainnet | Public Key |
|:-|:-:|:-:|:-|
@@ -86,23 +76,35 @@ Find all JSON RPC Methods for ZK Compression [here](/api-reference/json-rpc-meth
| #5 CPI Context | ✓ | - | **cpi5ZTjdgYpZ1Xr7B1cMLLUE81oTtJbNNAyKary2nV6** |
+
+
+
+| | Devnet | Mainnet | Public Key |
+|:-|:-:|:-:|:-|
+| State Tree | ✓ | ✓ | **smt2rJAFdyJJupwMKAqTNAJwvjhmiZ4JYGZmbVRw1Ho** |
+| Nullifier Queue | ✓ | ✓ | **nfq2hgS7NYemXsFaFUCe3EMXSDSfnZnAe27jC6aPP1X** |
+| CPI Context | ✓ | ✓ | **cpi2cdhkH5roePvcudTgUL8ppEBfTay1desGh8G8QxK** |
+
+
+
+
## Address Trees & Queues
-
-| Address Tree #1 | |
-|:-|:-|
-| Address Tree #1 | **amt1Ayt45jfbdw5YSo7iz6WZxUmnZsQTYXy82hVwyC2** |
-| Address Queue #1 | **aq1S9z4reTSQAdgWHGD2zDaS39sjGrAxbR31vxJ2F4F** |
-
| Address Tree | |
|:-|:-|
| Address Tree | **amt2kaJA14v3urZbZvnc5v2np8jqvc4Z8zDep5wbtzx** |
+
+| Address Tree #1 | |
+|:-|:-|
+| Address Tree #1 | **amt1Ayt45jfbdw5YSo7iz6WZxUmnZsQTYXy82hVwyC2** |
+| Address Queue #1 | **aq1S9z4reTSQAdgWHGD2zDaS39sjGrAxbR31vxJ2F4F** |
+
## Interface PDA
@@ -186,7 +188,7 @@ Start building with Compressed Tokens or PDAs
title="Compressed PDAs"
icon="chevron-right"
color="#0066ff"
- href="/compressed-pdas/create-a-program-with-compressed-pdas"
+ href="/compressed-pdas/overview"
horizontal
/>
\ No newline at end of file
diff --git a/resources/cli-installation.mdx b/resources/cli-installation.mdx
index ed7a6ce..d80cda7 100644
--- a/resources/cli-installation.mdx
+++ b/resources/cli-installation.mdx
@@ -253,7 +253,7 @@ FLAGS
title="Build with compressed PDAs"
icon="chevron-right"
color="#0066ff"
- href="/compressed-pdas/create-a-program-with-compressed-pdas"
+ href="/compressed-pdas/overview"
horizontal
/>
diff --git a/scripts/copy-zk-merkle-proof.sh b/scripts/copy-zk-merkle-proof.sh
new file mode 100755
index 0000000..f94b221
--- /dev/null
+++ b/scripts/copy-zk-merkle-proof.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+
+# Script to copy zk-merkle-proof code from program-examples to docs/snippets
+# Wraps each file in appropriate markdown code blocks
+
+PROGRAM_EXAMPLES="${PROGRAM_EXAMPLES:-$HOME/Workspace/program-examples/zk/zk-merkle-proof}"
+SNIPPETS_DIR="${SNIPPETS_DIR:-$(cd "$(dirname "$0")/.." && pwd)/snippets/code-snippets/zk/merkle-proof}"
+
+mkdir -p "$SNIPPETS_DIR"
+
+wrap_code() {
+ local input_file="$1"
+ local output_file="$2"
+ local lang="$3"
+ echo "\`\`\`$lang" > "$output_file"
+ cat "$input_file" >> "$output_file"
+ echo '```' >> "$output_file"
+ echo "Created: $output_file"
+}
+
+# Circuit
+if [ -f "$PROGRAM_EXAMPLES/circuits/merkle_proof.circom" ]; then
+ wrap_code "$PROGRAM_EXAMPLES/circuits/merkle_proof.circom" "$SNIPPETS_DIR/circuit.mdx" "javascript expandable"
+fi
+
+# Program
+if [ -f "$PROGRAM_EXAMPLES/src/lib.rs" ]; then
+ wrap_code "$PROGRAM_EXAMPLES/src/lib.rs" "$SNIPPETS_DIR/program.mdx" "rust expandable"
+fi
+
+# Rust client
+if [ -f "$PROGRAM_EXAMPLES/tests/test.rs" ]; then
+ wrap_code "$PROGRAM_EXAMPLES/tests/test.rs" "$SNIPPETS_DIR/rust-client.mdx" "rust expandable"
+fi
+
+# TypeScript client
+if [ -f "$PROGRAM_EXAMPLES/ts-tests/merkle-proof.test.ts" ]; then
+ wrap_code "$PROGRAM_EXAMPLES/ts-tests/merkle-proof.test.ts" "$SNIPPETS_DIR/typescript-client.mdx" "typescript expandable"
+fi
+
+echo ""
+echo "Done! Created snippets in: $SNIPPETS_DIR"
+find "$SNIPPETS_DIR" -name "*.mdx" -type f | sort
diff --git a/scripts/copy-zk-nullifier.sh b/scripts/copy-zk-nullifier.sh
new file mode 100755
index 0000000..32aa8a8
--- /dev/null
+++ b/scripts/copy-zk-nullifier.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+# Script to copy zk-nullifier code from program-examples to docs/snippets
+
+PROGRAM_EXAMPLES="${PROGRAM_EXAMPLES:-$HOME/Workspace/program-examples/zk/zk-nullifier}"
+SNIPPETS_DIR="${SNIPPETS_DIR:-$(cd "$(dirname "$0")/.." && pwd)/snippets/code-snippets/zk/nullifier}"
+
+mkdir -p "$SNIPPETS_DIR"
+
+wrap_code() {
+ local input_file="$1"
+ local output_file="$2"
+ local lang="$3"
+ echo "\`\`\`$lang" > "$output_file"
+ cat "$input_file" >> "$output_file"
+ echo '```' >> "$output_file"
+ echo "Created: $output_file"
+}
+
+# Circuit
+if [ -f "$PROGRAM_EXAMPLES/circuits/nullifier.circom" ]; then
+ wrap_code "$PROGRAM_EXAMPLES/circuits/nullifier.circom" "$SNIPPETS_DIR/circuit.mdx" "javascript expandable"
+fi
+
+# Program
+if [ -f "$PROGRAM_EXAMPLES/programs/zk-nullifier/src/lib.rs" ]; then
+ wrap_code "$PROGRAM_EXAMPLES/programs/zk-nullifier/src/lib.rs" "$SNIPPETS_DIR/program.mdx" "rust expandable"
+fi
+
+# Rust client (unified)
+if [ -f "$PROGRAM_EXAMPLES/programs/zk-nullifier/tests/test.rs" ]; then
+ wrap_code "$PROGRAM_EXAMPLES/programs/zk-nullifier/tests/test.rs" "$SNIPPETS_DIR/rust-client.mdx" "rust expandable"
+fi
+
+# TypeScript client
+if [ -f "$PROGRAM_EXAMPLES/ts-tests/nullifier.test.ts" ]; then
+ wrap_code "$PROGRAM_EXAMPLES/ts-tests/nullifier.test.ts" "$SNIPPETS_DIR/typescript-client.mdx" "typescript expandable"
+fi
+
+echo ""
+echo "Done! Created snippets in: $SNIPPETS_DIR"
+find "$SNIPPETS_DIR" -name "*.mdx" -type f | sort
diff --git a/snippets/ai-prompts/v1-to-v2-migration.mdx b/snippets/ai-prompts/v1-to-v2-migration.mdx
new file mode 100644
index 0000000..f6743d3
--- /dev/null
+++ b/snippets/ai-prompts/v1-to-v2-migration.mdx
@@ -0,0 +1,105 @@
+---
+argument-hint:
+description: Migrate Light Protocol program from v1 to v2 Merkle trees
+allowed-tools: [Bash, Read, Glob, Grep, Task, WebFetch]
+---
+
+Migrate this Light Protocol program from v1 to v2 Merkle trees.
+
+## Goal
+
+Produce a **fully working migration** that builds and tests pass.
+
+## Available commands
+
+Via Bash tool:
+- `cargo build-sbf`, `cargo test-sbf`, `cargo fmt`, `cargo clippy`
+- `anchor build`, `anchor test`
+- `grep`, `sed`
+
+## Documentation
+
+- Migration Guide: https://zkcompression.com/references/migration-v1-to-v2
+- Reference PR: https://github.com/Lightprotocol/program-examples/commit/54f0e7f15c2972a078f776cfb40b238d83c7e486
+
+## Reference repos
+
+program-examples/counter/anchor/
+├── programs/counter/src/lib.rs # v2 patterns: derive_address, CpiAccounts
+├── Cargo.toml # v2 feature flags
+└── tests/counter.ts # v2 client patterns
+
+## Workflow
+
+### Phase 1: Index program
+
+Find all v1 patterns:
+
+ grep -r "::v1::" src/ tests/
+ grep -r "ADDRESS_TREE_V1" src/
+ grep -r "into_new_address_params_packed" src/
+ grep -r "get_address_tree_v1" tests/
+
+### Phase 2: Update dependencies
+
+Add v2 feature to Cargo.toml:
+
+ [dependencies]
+ light-sdk = { version = "0.17", features = ["anchor", "v2"] }
+ light-sdk-types = { version = "0.17", features = ["v2"] }
+
+ [dev-dependencies]
+ light-program-test = { version = "0.17", features = ["v2"] }
+ light-client = { version = "0.17", features = ["v2"] }
+
+### Phase 3: Rust SDK replacements
+
+| v1 Pattern | v2 Replacement |
+|------------|----------------|
+| address::v1::derive_address | address::v2::derive_address |
+| cpi::v1::CpiAccounts | cpi::v2::CpiAccounts |
+| cpi::v1::LightSystemProgramCpi | cpi::v2::LightSystemProgramCpi |
+| constants::ADDRESS_TREE_V1 | light_sdk_types::ADDRESS_TREE_V2 |
+| .into_new_address_params_packed(seed) | .into_new_address_params_assigned_packed(seed, Some(0)) |
+| .add_system_accounts(config) | .add_system_accounts_v2(config) |
+
+### Phase 4: TypeScript SDK replacements
+
+| v1 Pattern | v2 Replacement |
+|------------|----------------|
+| deriveAddress( | deriveAddressV2( |
+| deriveAddressSeed( | deriveAddressSeedV2( |
+| defaultTestStateTreeAccounts().addressTree | batchAddressTree |
+| .newWithSystemAccounts( | .newWithSystemAccountsV2( |
+| get_address_tree_v1() | get_address_tree_v2() |
+| get_random_state_tree_info_v1() | get_random_state_tree_info() |
+
+### Phase 5: Build and test loop
+
+**Required commands (no shortcuts):**
+
+For Anchor programs: `anchor build && anchor test`
+
+For Native programs: `cargo build-sbf && cargo test-sbf`
+
+**NO shortcuts allowed:**
+
+- Do NOT use `cargo build` (must use `cargo build-sbf`)
+- Do NOT use `cargo test` (must use `cargo test-sbf`)
+- Tests MUST run against real BPF bytecode
+
+**On failure:** Spawn debugger agent with error context.
+
+**Loop rules:**
+
+1. Each debugger gets fresh context + previous debug reports
+2. Each attempt tries something DIFFERENT
+3. **NEVER GIVE UP** - keep spawning until fixed
+
+Do NOT proceed until all tests pass.
+
+## DeepWiki fallback
+
+If no matching pattern in reference repos:
+
+ mcp__deepwiki__ask_question("Lightprotocol/light-protocol", "How to migrate {pattern} from v1 to v2?")
\ No newline at end of file
diff --git a/snippets/ai-prompts/zk-app.mdx b/snippets/ai-prompts/zk-app.mdx
new file mode 100644
index 0000000..9edf37c
--- /dev/null
+++ b/snippets/ai-prompts/zk-app.mdx
@@ -0,0 +1,155 @@
+```markdown expandable
+---
+argument-hint:
+description: Design a ZK App POC with rent-free nullifiers, compressed accounts, and Groth16 circuits
+allowed-tools: [Bash, Read, Glob, Grep, Task, WebFetch]
+---
+
+Design a Solana program with tests that uses rent-free nullifiers, compressed accounts, and Groth16 circuits.
+
+
+## Initial App Design
+
+
+## Goal
+
+Produce a **fully working POC** that builds and tests pass.
+
+## Available commands
+
+Via Bash tool:
+- `cargo build-sbf`, `cargo test-sbf`, `cargo fmt`, `cargo clippy`
+- `anchor build`, `anchor test`, `anchor deploy`
+- `circom`, `snarkjs`, `solana`, `light`
+
+## Documentation
+
+- Nullifiers: https://zkcompression.com/zk/nullifiers
+- Compressed Accounts with Poseidon Hashes: https://zkcompression.com/zk/compressed-account-zk
+
+## Reference repos
+
+Nullifier example. Does not need a circuit.
+nullifier/
+├── programs/nullifier/
+│ ├── src/lib.rs # Program with create_nullifiers helper
+│ └── tests/test.rs # Rust integration tests
+└── ts-tests/
+ └── nullifier.test.ts # TypeScript tests
+
+Full Example
+program-examples/zk/zk-id/
+├── programs/zk-id/src/
+│ ├── lib.rs # create_issuer, add_credential, zk_verify_credential
+│ └── verifying_key.rs # Groth16 key from circom trusted setup
+├── circuits/
+│ └── compressed_account_merkle_proof.circom # Merkle proof
+└── tests/
+    └── zk-id.ts                          # Proof generation + on-chain verification
+
+
+## Workflow
+
+### Phase 1: Design application
+
+**1.1 Define private state**
+
+What data stays private? (credentials, balances, votes, etc.)
+
+**1.2 Define public inputs**
+
+What does the circuit prove publicly? (nullifier, merkle root, commitments)
+
+**1.3 Do not include circuits**
+
+
+
+### Phase 2: Index reference implementation
+
+ grep -r "LightAccountPoseidon" program-examples/zk/
+ grep -r "Groth16Verifier" program-examples/zk/
+ grep -r "derive_address.*nullifier" program-examples/zk/
+ grep -r "read_state_merkle_tree_root" program-examples/zk/
+
+Read matching files to understand patterns.
+
+### Phase 3: Circuit development
+
+**3.1 Write circom circuit**
+
+Based on compressed_account_merkle_proof.circom:
+- Merkle proof verification
+- Nullifier computation
+- Public input constraints
+
+**3.2 Trusted setup**
+
+ circom circuit.circom --r1cs --wasm --sym
+    snarkjs groth16 setup circuit.r1cs pot_final.ptau circuit_final.zkey
+ snarkjs zkey export verificationkey circuit_final.zkey verification_key.json
+ snarkjs zkey export solidityverifier circuit_final.zkey # adapt for Solana
+
+**3.3 Add sensitive files to .gitignore**
+
+ *.zkey
+ *.ptau
+ *.r1cs
+ *_js/
+
+### Phase 4: Program implementation
+
+| Pattern | Function | Reference |
+|---------|----------|-----------|
+| Poseidon state | `LightAccountPoseidon::new_init()` | zk-id/lib.rs |
+| Nullifier address | `derive_address([prefix, nullifier, ctx], tree, program)` | zk-id/lib.rs |
+| Read root only | `read_state_merkle_tree_root()` | zk-id/lib.rs |
+| Groth16 verify | `Groth16Verifier::new().verify()` | zk-id/lib.rs |
+
+**Dependencies:**
+
+ [dependencies]
+ anchor-lang = "0.31.1"
+ light-sdk = { version = "0.17.1", features = ["anchor", "poseidon", "merkle-tree", "v2"] }
+ light-hasher = "5.0.0"
+ light-sdk-types = { version = "0.17.1", features = ["v2"] }
+ groth16-solana = { git = "https://github.com/Lightprotocol/groth16-solana", rev = "66c0dc87" }
+
+ [dev-dependencies]
+ light-program-test = "0.17.1"
+ light-client = "0.17.1"
+
+### Phase 5: Build and test loop
+
+**Required commands (no shortcuts):**
+
+For Anchor programs: `anchor build && anchor test`
+
+For Native programs: `cargo build-sbf && cargo test-sbf`
+
+**NO shortcuts allowed:**
+
+- Do NOT use `cargo build` (must use `cargo build-sbf`)
+- Do NOT use `cargo test` (must use `cargo test-sbf`)
+- Do NOT skip SBF compilation
+- Tests MUST run against real BPF bytecode
+
+**On failure:** Spawn debugger agent with error context.
+
+**Loop rules:**
+
+1. Each debugger gets fresh context + previous debug reports
+2. Each attempt tries something DIFFERENT
+3. **NEVER GIVE UP** - keep spawning until fixed
+
+Do NOT proceed until all tests pass.
+
+### Phase 6: Cleanup (only after tests pass)
+
+ rm -rf target/
+
+## DeepWiki fallback
+
+If no matching pattern in reference repos:
+
+ mcp__deepwiki__ask_question("Lightprotocol/light-protocol", "How to {operation}?")
+```
\ No newline at end of file
diff --git a/snippets/code-snippets/zk/merkle-proof/circuit.mdx b/snippets/code-snippets/zk/merkle-proof/circuit.mdx
new file mode 100644
index 0000000..d650167
--- /dev/null
+++ b/snippets/code-snippets/zk/merkle-proof/circuit.mdx
@@ -0,0 +1,84 @@
+```javascript expandable
+pragma circom 2.0.0;
+
+include "../node_modules/circomlib/circuits/poseidon.circom";
+include "../node_modules/circomlib/circuits/bitify.circom";
+include "../node_modules/circomlib/circuits/switcher.circom";
+
+// Merkle Proof Verification Template
+// Verifies that a leaf is in a Merkle tree with a given root
+template MerkleProof(levels) {
+ signal input leaf;
+ signal input pathElements[levels];
+ signal input leafIndex;
+ signal output root;
+
+ component switcher[levels];
+ component hasher[levels];
+ component indexBits = Num2Bits(levels);
+ indexBits.in <== leafIndex;
+
+ for (var i = 0; i < levels; i++) {
+ switcher[i] = Switcher();
+ switcher[i].L <== i == 0 ? leaf : hasher[i - 1].out;
+ switcher[i].R <== pathElements[i];
+ switcher[i].sel <== indexBits.out[i];
+
+ hasher[i] = Poseidon(2);
+ hasher[i].inputs[0] <== switcher[i].outL;
+ hasher[i].inputs[1] <== switcher[i].outR;
+ }
+
+ root <== hasher[levels - 1].out;
+}
+
+template CompressedAccountHash() {
+ signal input owner_hashed;
+ signal input leaf_index;
+ signal input merkle_tree_hashed;
+ signal input address;
+ signal input discriminator;
+ signal input data_hash;
+ signal output hash;
+
+ component poseidon = Poseidon(6);
+ poseidon.inputs[0] <== owner_hashed;
+ poseidon.inputs[1] <== leaf_index;
+ poseidon.inputs[2] <== merkle_tree_hashed;
+ poseidon.inputs[3] <== address;
+ poseidon.inputs[4] <== discriminator + 36893488147419103232;
+ poseidon.inputs[5] <== data_hash;
+ hash <== poseidon.out;
+}
+
+template CompressedAccountMerkleProof(levels) {
+ signal input owner_hashed;
+ signal input merkle_tree_hashed;
+ signal input discriminator;
+ signal input data_hash;
+ signal input expectedRoot;
+
+ signal input leaf_index;
+ signal input account_leaf_index;
+ signal input address;
+ signal input pathElements[levels];
+
+ component accountHasher = CompressedAccountHash();
+ accountHasher.owner_hashed <== owner_hashed;
+ accountHasher.leaf_index <== account_leaf_index;
+ accountHasher.address <== address;
+ accountHasher.merkle_tree_hashed <== merkle_tree_hashed;
+ accountHasher.discriminator <== discriminator;
+ accountHasher.data_hash <== data_hash;
+
+ component merkleProof = MerkleProof(levels);
+ merkleProof.leaf <== accountHasher.hash;
+ merkleProof.pathElements <== pathElements;
+ merkleProof.leafIndex <== leaf_index;
+ merkleProof.root === expectedRoot;
+}
+
+component main {
+ public [owner_hashed, merkle_tree_hashed, discriminator, data_hash, expectedRoot]
+} = CompressedAccountMerkleProof(26);
+```
diff --git a/snippets/code-snippets/zk/merkle-proof/program.mdx b/snippets/code-snippets/zk/merkle-proof/program.mdx
new file mode 100644
index 0000000..e309ed7
--- /dev/null
+++ b/snippets/code-snippets/zk/merkle-proof/program.mdx
@@ -0,0 +1,172 @@
+```rust expandable
+#![allow(unexpected_cfgs)]
+#![allow(deprecated)]
+
+use anchor_lang::prelude::*;
+use borsh::{BorshDeserialize, BorshSerialize};
+use groth16_solana::groth16::Groth16Verifier;
+use light_hasher::to_byte_array::ToByteArray;
+use light_hasher::HasherError;
+use light_sdk::account::poseidon::LightAccount as LightAccountPoseidon;
+use light_sdk::address::v2::derive_address;
+use light_sdk::cpi::v1::CpiAccounts;
+use light_sdk::{
+ cpi::{v1::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction},
+ derive_light_cpi_signer,
+ instruction::{CompressedProof, PackedAddressTreeInfo, ValidityProof},
+ merkle_tree::v1::read_state_merkle_tree_root,
+ LightDiscriminator, LightHasher,
+};
+use light_sdk_types::CpiSigner;
+
+declare_id!("MPzkYomvQc4VQPwMr6bFduyWRQZVCh5CofgDC4dFqJp");
+
+pub const LIGHT_CPI_SIGNER: CpiSigner =
+ derive_light_cpi_signer!("MPzkYomvQc4VQPwMr6bFduyWRQZVCh5CofgDC4dFqJp");
+
+pub const ZK_ACCOUNT: &[u8] = b"zk_account";
+
+pub mod verifying_key;
+
+#[program]
+pub mod zk_merkle_proof {
+ use groth16_solana::decompression::{decompress_g1, decompress_g2};
+ use light_hasher::hash_to_field_size::hashv_to_bn254_field_size_be_const_array;
+
+ use super::*;
+
+ pub fn create_account<'info>(
+ ctx: Context<'_, '_, '_, 'info, CreateAccountAccounts<'info>>,
+ proof: ValidityProof,
+ address_tree_info: PackedAddressTreeInfo,
+ output_state_tree_index: u8,
+ data_hash: [u8; 32],
+ ) -> Result<()> {
+ let light_cpi_accounts = CpiAccounts::new(
+ ctx.accounts.signer.as_ref(),
+ ctx.remaining_accounts,
+ crate::LIGHT_CPI_SIGNER,
+ );
+
+ let address_tree_pubkey = address_tree_info
+ .get_tree_pubkey(&light_cpi_accounts)
+ .map_err(|_| ProgramError::InvalidAccountData)?;
+
+ let (address, address_seed) = derive_address(
+ &[ZK_ACCOUNT, &data_hash],
+ &address_tree_pubkey,
+ &crate::ID,
+ );
+
+        let mut account = LightAccountPoseidon::<ZkAccount>::new_init(
+ &crate::ID,
+ Some(address),
+ output_state_tree_index,
+ );
+
+ account.data_hash = DataHash(data_hash);
+
+ LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof)
+ .with_light_account_poseidon(account)?
+ .with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)])
+ .invoke(light_cpi_accounts)?;
+
+ Ok(())
+ }
+
+ pub fn verify_account<'info>(
+ ctx: Context<'_, '_, '_, 'info, VerifyAccountAccounts<'info>>,
+ input_root_index: u16,
+ zk_proof: CompressedProof,
+ data_hash: [u8; 32],
+ ) -> Result<()> {
+ let expected_root = read_state_merkle_tree_root(
+ &ctx.accounts.state_merkle_tree.to_account_info(),
+ input_root_index,
+ )
+ .map_err(ProgramError::from)?;
+
+ let owner_hashed =
+ hashv_to_bn254_field_size_be_const_array::<2>(&[&crate::ID.to_bytes()]).unwrap();
+
+ let merkle_tree_pubkey = ctx.accounts.state_merkle_tree.key();
+ let merkle_tree_hashed =
+ hashv_to_bn254_field_size_be_const_array::<2>(&[&merkle_tree_pubkey.to_bytes()])
+ .unwrap();
+
+ let mut discriminator = [0u8; 32];
+ discriminator[24..].copy_from_slice(ZkAccount::LIGHT_DISCRIMINATOR_SLICE);
+
+ let public_inputs: [[u8; 32]; 5] = [
+ owner_hashed,
+ merkle_tree_hashed,
+ discriminator,
+ data_hash,
+ expected_root,
+ ];
+
+ let proof_a = decompress_g1(&zk_proof.a).map_err(|e| {
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ let proof_b = decompress_g2(&zk_proof.b).map_err(|e| {
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ let proof_c = decompress_g1(&zk_proof.c).map_err(|e| {
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ let mut verifier = Groth16Verifier::new(
+ &proof_a,
+ &proof_b,
+ &proof_c,
+ &public_inputs,
+ &crate::verifying_key::VERIFYINGKEY,
+ )
+ .map_err(|e| {
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ verifier.verify().map_err(|e| {
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ Ok(())
+ }
+}
+
+#[derive(Accounts)]
+pub struct CreateAccountAccounts<'info> {
+ #[account(mut)]
+ pub signer: Signer<'info>,
+}
+
+#[derive(Accounts)]
+pub struct VerifyAccountAccounts<'info> {
+ #[account(mut)]
+ pub signer: Signer<'info>,
+ /// CHECK: validated by read_state_merkle_tree_root
+ pub state_merkle_tree: UncheckedAccount<'info>,
+}
+
+#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize, LightDiscriminator, LightHasher)]
+pub struct ZkAccount {
+ pub data_hash: DataHash,
+}
+
+#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize)]
+pub struct DataHash(pub [u8; 32]);
+
+impl ToByteArray for DataHash {
+ const NUM_FIELDS: usize = 1;
+ fn to_byte_array(&self) -> std::result::Result<[u8; 32], HasherError> {
+ Ok(self.0)
+ }
+}
+```
diff --git a/snippets/code-snippets/zk/merkle-proof/rust-client.mdx b/snippets/code-snippets/zk/merkle-proof/rust-client.mdx
new file mode 100644
index 0000000..3ad10bc
--- /dev/null
+++ b/snippets/code-snippets/zk/merkle-proof/rust-client.mdx
@@ -0,0 +1,329 @@
+```rust expandable
+use anchor_lang::{InstructionData, ToAccountMetas};
+use circom_prover::{prover::ProofLib, witness::WitnessFn, CircomProver};
+use groth16_solana::proof_parser::circom_prover::convert_proof;
+use light_hasher::{hash_to_field_size::hash_to_bn254_field_size_be, Hasher, Poseidon};
+use light_program_test::{
+ program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError,
+};
+use light_sdk::{
+ address::v2::derive_address,
+ instruction::{PackedAccounts, SystemAccountMetaConfig},
+};
+use num_bigint::BigUint;
+use solana_sdk::{
+ instruction::Instruction,
+ pubkey::Pubkey,
+ signature::{Keypair, Signature, Signer},
+};
+use std::collections::HashMap;
+use zk_merkle_proof::ZK_ACCOUNT;
+
+#[link(name = "circuit", kind = "static")]
+extern "C" {}
+
+rust_witness::witness!(merkleproof);
+
+#[tokio::test]
+async fn test_create_and_verify_account() {
+ let config =
+ ProgramTestConfig::new(true, Some(vec![("zk_merkle_proof", zk_merkle_proof::ID)]));
+ let mut rpc = LightProgramTest::new(config).await.unwrap();
+ let payer = rpc.get_payer().insecure_clone();
+
+ // First byte = 0 for BN254 field compatibility
+ let mut secret_data = [0u8; 32];
+ for i in 1..32 {
+ secret_data[i] = (i as u8) + 65;
+ }
+ let data_hash = Poseidon::hashv(&[&secret_data]).unwrap();
+
+ let address_tree_info = rpc.get_address_tree_v2();
+
+ let (account_address, _) = derive_address(
+ &[ZK_ACCOUNT, &data_hash],
+ &address_tree_info.tree,
+ &zk_merkle_proof::ID,
+ );
+
+ create_account(&mut rpc, &payer, &account_address, address_tree_info.clone(), data_hash)
+ .await
+ .unwrap();
+
+ let accounts = rpc
+ .get_compressed_accounts_by_owner(&zk_merkle_proof::ID, None, None)
+ .await
+ .unwrap();
+ assert_eq!(accounts.value.items.len(), 1);
+ let created_account = &accounts.value.items[0];
+
+ let account_data_hash = created_account.data.as_ref().unwrap().data_hash;
+
+ verify_account(&mut rpc, &payer, created_account, account_data_hash)
+ .await
+ .unwrap();
+}
+
+async fn create_account<R>(
+ rpc: &mut R,
+ payer: &Keypair,
+ address: &[u8; 32],
+ address_tree_info: light_client::indexer::TreeInfo,
+ data_hash: [u8; 32],
+) -> Result<Signature, RpcError>
+where
+ R: Rpc + Indexer,
+{
+ let mut remaining_accounts = PackedAccounts::default();
+ remaining_accounts.add_pre_accounts_signer(payer.pubkey());
+ let config = SystemAccountMetaConfig::new(zk_merkle_proof::ID);
+ remaining_accounts.add_system_accounts_v2(config)?;
+
+ let rpc_result = rpc
+ .get_validity_proof(
+ vec![],
+ vec![AddressWithTree {
+ address: *address,
+ tree: address_tree_info.tree,
+ }],
+ None,
+ )
+ .await?
+ .value;
+
+ let packed_address_tree_accounts = rpc_result
+ .pack_tree_infos(&mut remaining_accounts)
+ .address_trees;
+
+ let output_state_tree_index = rpc
+ .get_random_state_tree_info_v1()?
+ .pack_output_tree_index(&mut remaining_accounts)?;
+
+ let (remaining_accounts_metas, system_accounts_offset, _) = remaining_accounts.to_account_metas();
+
+ let instruction_data = zk_merkle_proof::instruction::CreateAccount {
+ proof: rpc_result.proof,
+ address_tree_info: packed_address_tree_accounts[0],
+ output_state_tree_index,
+ system_accounts_offset: system_accounts_offset as u8,
+ data_hash,
+ };
+
+ let accounts = zk_merkle_proof::accounts::CreateAccountAccounts {
+ signer: payer.pubkey(),
+ };
+
+ let instruction = Instruction {
+ program_id: zk_merkle_proof::ID,
+ accounts: [
+ accounts.to_account_metas(None),
+ remaining_accounts_metas,
+ ]
+ .concat(),
+ data: instruction_data.data(),
+ };
+
+ rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer])
+ .await
+}
+
+async fn verify_account<R>(
+ rpc: &mut R,
+ payer: &Keypair,
+ account: &light_client::indexer::CompressedAccount,
+ data_hash: [u8; 32],
+) -> Result<Signature, RpcError>
+where
+ R: Rpc + Indexer,
+{
+ let proofs_result = rpc
+ .get_multiple_compressed_account_proofs(vec![account.hash], None)
+ .await?;
+ let proofs = proofs_result.value.items;
+
+ assert!(!proofs.is_empty(), "No proofs returned");
+
+ let merkle_proof = &proofs[0];
+ let leaf_index = merkle_proof.leaf_index as u32;
+ let merkle_proof_hashes = &merkle_proof.proof;
+ let merkle_root = merkle_proof.root;
+ let root_index = (merkle_proof.root_seq % 2400) as u16;
+ let state_tree = merkle_proof.merkle_tree;
+
+ let zk_proof = generate_merkle_proof(
+ account,
+ &state_tree,
+ leaf_index,
+ merkle_proof_hashes,
+ &merkle_root,
+ &data_hash,
+ );
+
+ let mut remaining_accounts = PackedAccounts::default();
+ let config = SystemAccountMetaConfig::new(zk_merkle_proof::ID);
+ remaining_accounts.add_system_accounts(config)?;
+
+ let instruction_data = zk_merkle_proof::instruction::VerifyAccount {
+ input_root_index: root_index,
+ zk_proof,
+ data_hash,
+ };
+
+ let accounts = zk_merkle_proof::accounts::VerifyAccountAccounts {
+ signer: payer.pubkey(),
+ state_merkle_tree: state_tree,
+ };
+
+ let instruction = Instruction {
+ program_id: zk_merkle_proof::ID,
+ accounts: [
+ accounts.to_account_metas(None),
+ remaining_accounts.to_account_metas().0,
+ ]
+ .concat(),
+ data: instruction_data.data(),
+ };
+
+ rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer])
+ .await
+}
+
+fn generate_merkle_proof(
+ account: &light_client::indexer::CompressedAccount,
+ merkle_tree_pubkey: &Pubkey,
+ leaf_index: u32,
+ merkle_proof_hashes: &[[u8; 32]],
+ merkle_root: &[u8; 32],
+ data_hash: &[u8; 32],
+) -> light_compressed_account::instruction_data::compressed_proof::CompressedProof {
+ let zkey_path = "./build/merkle_proof_final.zkey".to_string();
+
+ let mut proof_inputs = HashMap::new();
+
+ let owner_hashed = hash_to_bn254_field_size_be(zk_merkle_proof::ID.as_ref());
+ let merkle_tree_hashed = hash_to_bn254_field_size_be(merkle_tree_pubkey.as_ref());
+
+ let discriminator = if let Some(ref data) = account.data {
+ data.discriminator
+ } else {
+ [0u8; 8]
+ };
+
+ let address = account.address.expect("Account must have an address");
+
+ // Verify hash can be recreated
+ let mut leaf_index_bytes = [0u8; 32];
+ leaf_index_bytes[28..32].copy_from_slice(&(account.leaf_index as u32).to_le_bytes());
+
+ let mut discriminator_with_domain = [0u8; 32];
+ discriminator_with_domain[24..32].copy_from_slice(&discriminator);
+ discriminator_with_domain[23] = 2;
+
+ let computed_hash = Poseidon::hashv(&[
+ owner_hashed.as_slice(),
+ leaf_index_bytes.as_slice(),
+ merkle_tree_hashed.as_slice(),
+ address.as_slice(),
+ discriminator_with_domain.as_slice(),
+ data_hash.as_slice(),
+ ])
+ .unwrap();
+
+ assert_eq!(computed_hash, account.hash, "Hash mismatch");
+
+ // Public inputs
+ proof_inputs.insert(
+ "owner_hashed".to_string(),
+ vec![BigUint::from_bytes_be(&owner_hashed).to_string()],
+ );
+ proof_inputs.insert(
+ "merkle_tree_hashed".to_string(),
+ vec![BigUint::from_bytes_be(&merkle_tree_hashed).to_string()],
+ );
+ proof_inputs.insert(
+ "discriminator".to_string(),
+ vec![BigUint::from_bytes_be(&discriminator).to_string()],
+ );
+ proof_inputs.insert(
+ "data_hash".to_string(),
+ vec![BigUint::from_bytes_be(data_hash).to_string()],
+ );
+ proof_inputs.insert(
+ "expectedRoot".to_string(),
+ vec![BigUint::from_bytes_be(merkle_root).to_string()],
+ );
+
+ // Private inputs
+ proof_inputs.insert("leaf_index".to_string(), vec![leaf_index.to_string()]);
+
+ let mut account_leaf_index_bytes = [0u8; 32];
+ account_leaf_index_bytes[28..32].copy_from_slice(&(account.leaf_index as u32).to_le_bytes());
+ proof_inputs.insert(
+ "account_leaf_index".to_string(),
+ vec![BigUint::from_bytes_be(&account_leaf_index_bytes).to_string()],
+ );
+
+ proof_inputs.insert(
+ "address".to_string(),
+ vec![BigUint::from_bytes_be(&address).to_string()],
+ );
+
+    let path_elements: Vec<String> = merkle_proof_hashes
+ .iter()
+ .map(|hash| BigUint::from_bytes_be(hash).to_string())
+ .collect();
+ proof_inputs.insert("pathElements".to_string(), path_elements);
+
+ let circuit_inputs = serde_json::to_string(&proof_inputs).unwrap();
+
+ let proof = CircomProver::prove(
+ ProofLib::Arkworks,
+ WitnessFn::RustWitness(merkleproof_witness),
+ circuit_inputs,
+ zkey_path.clone(),
+ )
+ .expect("Proof generation failed");
+
+ let is_valid = CircomProver::verify(ProofLib::Arkworks, proof.clone(), zkey_path.clone())
+ .expect("Proof verification failed");
+ assert!(is_valid, "Local proof verification failed");
+
+ // Verify with groth16-solana
+ {
+ use groth16_solana::groth16::Groth16Verifier;
+ use groth16_solana::proof_parser::circom_prover::convert_public_inputs;
+
+ let (proof_a, proof_b, proof_c) =
+ convert_proof(&proof.proof).expect("Failed to convert proof");
+ let public_inputs_converted: [[u8; 32]; 5] = convert_public_inputs(&proof.pub_inputs);
+
+ let mut verifier = Groth16Verifier::new(
+ &proof_a,
+ &proof_b,
+ &proof_c,
+ &public_inputs_converted,
+ &zk_merkle_proof::verifying_key::VERIFYINGKEY,
+ )
+ .expect("Failed to create verifier");
+
+ verifier.verify().expect("groth16-solana verification failed");
+ }
+
+ let (proof_a_uncompressed, proof_b_uncompressed, proof_c_uncompressed) =
+ convert_proof(&proof.proof).expect("Failed to convert proof");
+
+ use groth16_solana::proof_parser::circom_prover::convert_proof_to_compressed;
+ let (proof_a, proof_b, proof_c) = convert_proof_to_compressed(
+ &proof_a_uncompressed,
+ &proof_b_uncompressed,
+ &proof_c_uncompressed,
+ )
+ .expect("Failed to compress proof");
+
+ light_compressed_account::instruction_data::compressed_proof::CompressedProof {
+ a: proof_a,
+ b: proof_b,
+ c: proof_c,
+ }
+}
+```
diff --git a/snippets/code-snippets/zk/merkle-proof/typescript-client.mdx b/snippets/code-snippets/zk/merkle-proof/typescript-client.mdx
new file mode 100644
index 0000000..aafb96b
--- /dev/null
+++ b/snippets/code-snippets/zk/merkle-proof/typescript-client.mdx
@@ -0,0 +1,332 @@
+```typescript expandable
+import { web3, Program, AnchorProvider, setProvider } from "@coral-xyz/anchor";
+import {
+ bn,
+ createRpc,
+ deriveAddressSeedV2,
+ deriveAddressV2,
+ batchAddressTree,
+ PackedAccounts,
+ Rpc,
+ sleep,
+ SystemAccountMetaConfig,
+ defaultTestStateTreeAccounts,
+ featureFlags,
+ VERSION,
+ confirmTx,
+} from "@lightprotocol/stateless.js";
+import { buildPoseidonOpt } from "circomlibjs";
+import { keccak_256 } from "@noble/hashes/sha3";
+import * as snarkjs from "snarkjs";
+import * as assert from "assert";
+import * as path from "path";
+import * as fs from "fs";
+
+import {
+ parseProofToCompressed,
+ bigintToBytes32,
+ toFieldString,
+ generateFieldElement,
+} from "./utils/proof-helpers";
+
+// Force V2 mode
+(featureFlags as any).version = VERSION.V2;
+
+// Load IDL
+const IDL = JSON.parse(
+ fs.readFileSync(path.join(process.cwd(), "target/idl/zk_merkle_proof.json"), "utf8")
+);
+
+// Program ID from IDL
+const PROGRAM_ID = new web3.PublicKey(IDL.address);
+const ZK_ACCOUNT_PREFIX = Buffer.from("zk_account");
+const ZK_ACCOUNT_DISCRIMINATOR = Buffer.from([0x5b, 0x98, 0xb8, 0x43, 0x93, 0x6c, 0x21, 0xf4]);
+
+// Paths to circuit artifacts
+const BUILD_DIR = path.join(process.cwd(), "build");
+const WASM_PATH = path.join(BUILD_DIR, "merkle_proof_js/merkle_proof.wasm");
+const ZKEY_PATH = path.join(BUILD_DIR, "merkle_proof_final.zkey");
+
+const MERKLE_TREE_DEPTH = 26;
+
+/** Hash to BN254 field (matching Light Protocol's hashv_to_bn254_field_size_be) */
+function hashToBn254Field(data: Uint8Array): Uint8Array {
+ const hash = keccak_256(data);
+ hash[0] = hash[0] & 0x1f;
+ return hash;
+}
+
+describe("zk-merkle-proof", () => {
+ let rpc: Rpc;
+ let signer: web3.Keypair;
+ let poseidon: any;
+ let program: Program;
+
+ before(async () => {
+ rpc = createRpc(
+ "http://127.0.0.1:8899",
+ "http://127.0.0.1:8784",
+ "http://127.0.0.1:3001",
+ { commitment: "confirmed" }
+ );
+
+ signer = web3.Keypair.generate();
+ await rpc.requestAirdrop(signer.publicKey, web3.LAMPORTS_PER_SOL);
+ await sleep(2000);
+
+ poseidon = await buildPoseidonOpt();
+
+ // Setup Anchor provider and program
+ const connection = new web3.Connection("http://127.0.0.1:8899", "confirmed");
+ const wallet = {
+ publicKey: signer.publicKey,
+ signTransaction: async (tx: web3.Transaction) => {
+ tx.sign(signer);
+ return tx;
+ },
+ signAllTransactions: async (txs: web3.Transaction[]) => {
+ txs.forEach((tx) => tx.sign(signer));
+ return txs;
+ },
+ };
+ const provider = new AnchorProvider(connection, wallet as any, { commitment: "confirmed" });
+ setProvider(provider);
+ program = new Program(IDL, provider);
+ });
+
+ after(async () => {
+ // Terminate snarkjs curve worker to allow clean exit
+ // @ts-ignore
+ if (globalThis.curve_bn128) {
+ // @ts-ignore
+ await globalThis.curve_bn128.terminate();
+ }
+ });
+
+ /** Compute Poseidon hash of compressed account fields */
+ function computeAccountHash(
+ ownerHashed: Uint8Array,
+ leafIndex: bigint,
+ merkleTreeHashed: Uint8Array,
+ address: Uint8Array,
+ discriminator: bigint,
+ dataHash: Uint8Array
+ ): Uint8Array {
+ const LAMPORTS_OFFSET = 36893488147419103232n;
+ const hash = poseidon([
+ BigInt("0x" + Buffer.from(ownerHashed).toString("hex")),
+ leafIndex,
+ BigInt("0x" + Buffer.from(merkleTreeHashed).toString("hex")),
+ BigInt("0x" + Buffer.from(address).toString("hex")),
+ discriminator + LAMPORTS_OFFSET,
+ BigInt("0x" + Buffer.from(dataHash).toString("hex")),
+ ]);
+ return bigintToBytes32(poseidon.F.toObject(hash));
+ }
+
+ /** Compute Merkle root from leaf and path */
+ function computeMerkleRoot(leaf: Uint8Array, pathElements: Uint8Array[], leafIndex: number): Uint8Array {
+ let current = BigInt("0x" + Buffer.from(leaf).toString("hex"));
+
+ for (let i = 0; i < pathElements.length; i++) {
+ const pathElement = BigInt("0x" + Buffer.from(pathElements[i]).toString("hex"));
+ const isRight = (leafIndex >> i) & 1;
+ const [left, right] = isRight ? [pathElement, current] : [current, pathElement];
+ current = poseidon.F.toObject(poseidon([left, right]));
+ }
+
+ return bigintToBytes32(current);
+ }
+
+ /** Generate ZK proof for Merkle inclusion */
+ async function generateMerkleProof(
+ ownerHashed: Uint8Array,
+ merkleTreeHashed: Uint8Array,
+ discriminator: Uint8Array,
+ dataHash: Uint8Array,
+ expectedRoot: Uint8Array,
+ leafIndex: number,
+ accountLeafIndex: number,
+ address: Uint8Array,
+ pathElements: Uint8Array[]
+ ): Promise<{ a: number[]; b: number[]; c: number[] }> {
+ const inputs = {
+ owner_hashed: toFieldString(ownerHashed),
+ merkle_tree_hashed: toFieldString(merkleTreeHashed),
+ discriminator: toFieldString(discriminator),
+ data_hash: toFieldString(dataHash),
+ expectedRoot: toFieldString(expectedRoot),
+ leaf_index: leafIndex.toString(),
+ account_leaf_index: accountLeafIndex.toString(),
+ address: toFieldString(address),
+ pathElements: pathElements.map(toFieldString),
+ };
+
+ const { proof } = await snarkjs.groth16.fullProve(inputs, WASM_PATH, ZKEY_PATH);
+ return parseProofToCompressed(proof);
+ }
+
+ /** Build create_account instruction using Anchor */
+ async function buildCreateAccountInstruction(dataHash: Uint8Array): Promise {
+ const addressTree = new web3.PublicKey(batchAddressTree);
+ const outputStateTree = defaultTestStateTreeAccounts().merkleTree;
+
+ const seed = deriveAddressSeedV2([ZK_ACCOUNT_PREFIX, dataHash]);
+ const address = deriveAddressV2(seed, addressTree, PROGRAM_ID);
+
+ const proofResult = await rpc.getValidityProofV0(
+ [],
+ [{ tree: addressTree, queue: addressTree, address: bn(address.toBytes()) }]
+ );
+
+ const remainingAccounts = new PackedAccounts();
+ remainingAccounts.addPreAccountsSigner(signer.publicKey);
+ remainingAccounts.addSystemAccountsV2(SystemAccountMetaConfig.new(PROGRAM_ID));
+
+ const addressMerkleTreeIndex = remainingAccounts.insertOrGet(addressTree);
+ const outputStateTreeIndex = remainingAccounts.insertOrGet(outputStateTree);
+
+ const { remainingAccounts: accountMetas, systemStart } = remainingAccounts.toAccountMetas();
+
+ // Use Anchor to build instruction
+ // ValidityProof is a struct with an unnamed Option field
+ const proof = {
+ 0: proofResult.compressedProof,
+ };
+
+ const ix = await program.methods
+ .createAccount(
+ // proof (ValidityProof = struct with Option)
+ proof,
+ // address_tree_info (PackedAddressTreeInfo)
+ {
+ addressMerkleTreePubkeyIndex: addressMerkleTreeIndex,
+ addressQueuePubkeyIndex: addressMerkleTreeIndex,
+ rootIndex: proofResult.rootIndices[0],
+ },
+ // output_state_tree_index
+ outputStateTreeIndex,
+ // system_accounts_offset
+ systemStart,
+ // data_hash
+ Array.from(dataHash)
+ )
+ .accounts({
+ signer: signer.publicKey,
+ })
+ .remainingAccounts(accountMetas)
+ .instruction();
+
+ return ix;
+ }
+
+ describe("create_account", () => {
+ it("should create a compressed account with data hash", async () => {
+ const dataHash = generateFieldElement();
+ console.log("Data hash:", Buffer.from(dataHash).toString("hex").slice(0, 16) + "...");
+
+ const ix = await buildCreateAccountInstruction(dataHash);
+ const computeIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ units: 400_000 });
+
+ const tx = new web3.Transaction().add(computeIx, ix);
+ tx.recentBlockhash = (await rpc.getLatestBlockhash()).blockhash;
+ tx.feePayer = signer.publicKey;
+ tx.sign(signer);
+
+ const sig = await rpc.sendTransaction(tx, [signer]);
+ await confirmTx(rpc, sig);
+
+ console.log("Transaction signature:", sig);
+
+ const slot = await rpc.getSlot();
+ await rpc.confirmTransactionIndexed(slot);
+
+ const accounts = await rpc.getCompressedAccountsByOwner(PROGRAM_ID);
+ assert.ok(accounts.items.length > 0, "Account should be created");
+ console.log("Created accounts:", accounts.items.length);
+ });
+ });
+
+ describe("verify_account (ZK proof)", () => {
+ it("should verify account existence with ZK proof", async () => {
+ const dataHash = generateFieldElement();
+
+ const createIx = await buildCreateAccountInstruction(dataHash);
+ const computeIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ units: 400_000 });
+
+ const createTx = new web3.Transaction().add(computeIx, createIx);
+ createTx.recentBlockhash = (await rpc.getLatestBlockhash()).blockhash;
+ createTx.feePayer = signer.publicKey;
+ createTx.sign(signer);
+
+ await rpc.sendTransaction(createTx, [signer]);
+ await sleep(3000);
+
+ const slot = await rpc.getSlot();
+ await rpc.confirmTransactionIndexed(slot);
+
+ const accounts = await rpc.getCompressedAccountsByOwner(PROGRAM_ID);
+ assert.ok(accounts.items.length > 0, "Should have created account");
+
+ const account = accounts.items[0];
+ console.log("Account hash:", account.hash.toString(16).slice(0, 16) + "...");
+ console.log("Leaf index:", account.leafIndex);
+
+ const merkleProof = await rpc.getValidityProof([account.hash]);
+ console.log("Root index:", merkleProof.rootIndices[0]);
+
+ assert.ok(merkleProof.compressedProof, "Should have compressed proof");
+ assert.ok(merkleProof.rootIndices.length > 0, "Should have root indices");
+
+ console.log("Account verified in state tree");
+ });
+
+ it("should demonstrate ZK proof generation for Merkle inclusion", async () => {
+ const ownerHashed = hashToBn254Field(PROGRAM_ID.toBytes());
+ const merkleTreeHashed = hashToBn254Field(
+ new web3.PublicKey(defaultTestStateTreeAccounts().merkleTree).toBytes()
+ );
+
+ const dataHash = generateFieldElement();
+ const discriminator = new Uint8Array(32);
+ discriminator.set(ZK_ACCOUNT_DISCRIMINATOR, 24);
+
+ const pathElements = Array.from({ length: MERKLE_TREE_DEPTH }, () => new Uint8Array(32));
+ const address = generateFieldElement();
+
+ const accountHash = computeAccountHash(
+ ownerHashed,
+ 0n,
+ merkleTreeHashed,
+ address,
+ BigInt("0x" + Buffer.from(discriminator).toString("hex")),
+ dataHash
+ );
+
+ const expectedRoot = computeMerkleRoot(accountHash, pathElements, 0);
+
+ console.log("Account hash:", Buffer.from(accountHash).toString("hex").slice(0, 16) + "...");
+ console.log("Expected root:", Buffer.from(expectedRoot).toString("hex").slice(0, 16) + "...");
+ console.log("Generating ZK proof...");
+
+ const zkProof = await generateMerkleProof(
+ ownerHashed,
+ merkleTreeHashed,
+ discriminator,
+ dataHash,
+ expectedRoot,
+ 0,
+ 0,
+ address,
+ pathElements
+ );
+
+ assert.ok(zkProof.a.length === 32, "Proof A should be 32 bytes");
+ assert.ok(zkProof.b.length === 64, "Proof B should be 64 bytes");
+ assert.ok(zkProof.c.length === 32, "Proof C should be 32 bytes");
+
+ console.log("ZK Merkle proof generated successfully");
+ });
+ });
+});
+```
diff --git a/snippets/code-snippets/zk/nullifier/circuit.mdx b/snippets/code-snippets/zk/nullifier/circuit.mdx
new file mode 100644
index 0000000..dc584d4
--- /dev/null
+++ b/snippets/code-snippets/zk/nullifier/circuit.mdx
@@ -0,0 +1,32 @@
+```javascript expandable
+pragma circom 2.0.0;
+
+include "../node_modules/circomlib/circuits/poseidon.circom";
+
+// Single nullifier: proves nullifier = Poseidon(verification_id, secret)
+template Nullifier() {
+ signal input verification_id;
+ signal input nullifier;
+ signal input secret;
+
+ component hasher = Poseidon(2);
+ hasher.inputs[0] <== verification_id;
+ hasher.inputs[1] <== secret;
+ nullifier === hasher.out;
+}
+
+// Batch nullifier: proves n nullifiers with single proof
+template BatchNullifier(n) {
+ signal input verification_id;
+ signal input nullifier[n];
+ signal input secret[n];
+
+ component nullifiers[n];
+ for (var i = 0; i < n; i++) {
+ nullifiers[i] = Nullifier();
+ nullifiers[i].verification_id <== verification_id;
+ nullifiers[i].nullifier <== nullifier[i];
+ nullifiers[i].secret <== secret[i];
+ }
+}
+```
diff --git a/snippets/code-snippets/zk/nullifier/program.mdx b/snippets/code-snippets/zk/nullifier/program.mdx
new file mode 100644
index 0000000..6163d8c
--- /dev/null
+++ b/snippets/code-snippets/zk/nullifier/program.mdx
@@ -0,0 +1,248 @@
+```rust expandable
+#![allow(unexpected_cfgs)]
+#![allow(deprecated)]
+
+use anchor_lang::prelude::*;
+use borsh::{BorshDeserialize, BorshSerialize};
+use groth16_solana::groth16::Groth16Verifier;
+use light_sdk::account::LightAccount;
+use light_sdk::cpi::v2::CpiAccounts;
+use light_sdk::{
+ address::{v2::derive_address, NewAddressParamsAssignedPacked},
+ cpi::{v2::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction},
+ derive_light_cpi_signer,
+ instruction::{CompressedProof, PackedAddressTreeInfo, ValidityProof},
+ LightDiscriminator,
+};
+use light_sdk_types::CpiSigner;
+
+declare_id!("Bw8aty8LJY5Kg2b6djghjWGwt6cBc1tVQUoreUehvVq4");
+
+pub const LIGHT_CPI_SIGNER: CpiSigner =
+ derive_light_cpi_signer!("Bw8aty8LJY5Kg2b6djghjWGwt6cBc1tVQUoreUehvVq4");
+
+pub const NULLIFIER_PREFIX: &[u8] = b"nullifier";
+
+// Customize nullifiers per tx, e.g. 1 (single) or 4 (batch)
+pub const BATCH_SIZE: usize = 4;
+
+pub mod nullifier_1;
+pub mod nullifier_batch_4;
+
+#[program]
+pub mod zk_nullifier {
+ use groth16_solana::decompression::{decompress_g1, decompress_g2};
+
+ use super::*;
+
+ /// Creates 1 nullifier
+ pub fn create_nullifier<'info>(
+ ctx: Context<'_, '_, '_, 'info, CreateNullifierAccounts<'info>>,
+ proof: ValidityProof,
+ address_tree_info: PackedAddressTreeInfo,
+ output_state_tree_index: u8,
+ system_accounts_offset: u8,
+ zk_proof: CompressedProof,
+ verification_id: [u8; 32],
+ nullifier: [u8; 32],
+ ) -> Result<()> {
+ let light_cpi_accounts = CpiAccounts::new(
+ ctx.accounts.signer.as_ref(),
+ &ctx.remaining_accounts[system_accounts_offset as usize..],
+ crate::LIGHT_CPI_SIGNER,
+ );
+
+ let address_tree_pubkey = address_tree_info
+ .get_tree_pubkey(&light_cpi_accounts)
+ .map_err(|_| ErrorCode::AccountNotEnoughKeys)?;
+
+ if address_tree_pubkey.to_bytes() != light_sdk::constants::ADDRESS_TREE_V2 {
+ msg!("Invalid address tree");
+ return Err(ProgramError::InvalidAccountData.into());
+ }
+
+ let public_inputs: [[u8; 32]; 2] = [verification_id, nullifier];
+
+ msg!("Decompressing proof_a...");
+ let proof_a = decompress_g1(&zk_proof.a).map_err(|e| {
+ msg!("decompress_g1 failed for proof_a");
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ msg!("Decompressing proof_b...");
+ let proof_b = decompress_g2(&zk_proof.b).map_err(|e| {
+ msg!("decompress_g2 failed for proof_b");
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ msg!("Decompressing proof_c...");
+ let proof_c = decompress_g1(&zk_proof.c).map_err(|e| {
+ msg!("decompress_g1 failed for proof_c");
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ msg!("Creating verifier...");
+ let mut verifier = Groth16Verifier::new(
+ &proof_a,
+ &proof_b,
+ &proof_c,
+ &public_inputs,
+ &crate::nullifier_1::VERIFYINGKEY,
+ )
+ .map_err(|e| {
+ msg!("Groth16Verifier::new failed");
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ msg!("Verifying proof...");
+ verifier.verify().map_err(|e| {
+ msg!("verifier.verify() failed");
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+ msg!("Proof verified!");
+
+ let (address, address_seed) = derive_address(
+ &[
+ NULLIFIER_PREFIX,
+ nullifier.as_slice(),
+ verification_id.as_slice(),
+ ],
+ &address_tree_pubkey,
+ &crate::ID,
+ );
+
+        let nullifier_account = LightAccount::<'_, NullifierAccount>::new_init(
+ &crate::ID,
+ Some(address),
+ output_state_tree_index,
+ );
+
+ LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof)
+ .with_light_account(nullifier_account)?
+ .with_new_addresses(&[address_tree_info.into_new_address_params_assigned_packed(address_seed, Some(0))])
+ .invoke(light_cpi_accounts)?;
+
+ Ok(())
+ }
+
+ /// Creates 4 nullifiers with single proof
+ pub fn create_batch_nullifier<'info>(
+ ctx: Context<'_, '_, '_, 'info, CreateNullifierAccounts<'info>>,
+ proof: ValidityProof,
+ address_tree_infos: [PackedAddressTreeInfo; BATCH_SIZE],
+ output_state_tree_index: u8,
+ system_accounts_offset: u8,
+ zk_proof: CompressedProof,
+ verification_id: [u8; 32],
+ nullifiers: [[u8; 32]; BATCH_SIZE],
+ ) -> Result<()> {
+ let light_cpi_accounts = CpiAccounts::new(
+ ctx.accounts.signer.as_ref(),
+ &ctx.remaining_accounts[system_accounts_offset as usize..],
+ crate::LIGHT_CPI_SIGNER,
+ );
+
+ let address_tree_pubkey = address_tree_infos[0]
+ .get_tree_pubkey(&light_cpi_accounts)
+ .map_err(|_| ErrorCode::AccountNotEnoughKeys)?;
+
+ if address_tree_pubkey.to_bytes() != light_sdk::constants::ADDRESS_TREE_V2 {
+ msg!("Invalid address tree");
+ return Err(ProgramError::InvalidAccountData.into());
+ }
+
+ // 5 public inputs: verification_id + 4 nullifiers
+ let public_inputs: [[u8; 32]; 5] = [
+ verification_id,
+ nullifiers[0],
+ nullifiers[1],
+ nullifiers[2],
+ nullifiers[3],
+ ];
+
+ let proof_a = decompress_g1(&zk_proof.a).map_err(|e| {
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ let proof_b = decompress_g2(&zk_proof.b).map_err(|e| {
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ let proof_c = decompress_g1(&zk_proof.c).map_err(|e| {
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ let mut verifier = Groth16Verifier::new(
+ &proof_a,
+ &proof_b,
+ &proof_c,
+ &public_inputs,
+ &crate::nullifier_batch_4::VERIFYINGKEY,
+ )
+ .map_err(|e| {
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ verifier.verify().map_err(|e| {
+ let code: u32 = e.into();
+ Error::from(ProgramError::Custom(code))
+ })?;
+
+ // Create 4 nullifier accounts
+ let mut cpi_builder = LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof);
+        let mut new_address_params: Vec<NewAddressParamsAssignedPacked> = Vec::with_capacity(BATCH_SIZE);
+
+ for i in 0..BATCH_SIZE {
+ let (address, address_seed) = derive_address(
+ &[
+ NULLIFIER_PREFIX,
+ nullifiers[i].as_slice(),
+ verification_id.as_slice(),
+ ],
+ &address_tree_pubkey,
+ &crate::ID,
+ );
+
+            let nullifier_account = LightAccount::<'_, NullifierAccount>::new_init(
+ &crate::ID,
+ Some(address),
+ output_state_tree_index,
+ );
+
+ cpi_builder = cpi_builder.with_light_account(nullifier_account)?;
+ new_address_params
+ .push(address_tree_infos[i].into_new_address_params_assigned_packed(address_seed, Some(i as u8)));
+ }
+
+ cpi_builder
+ .with_new_addresses(&new_address_params)
+ .invoke(light_cpi_accounts)?;
+
+ Ok(())
+ }
+}
+
+#[derive(Accounts)]
+pub struct CreateNullifierAccounts<'info> {
+ #[account(mut)]
+ pub signer: Signer<'info>,
+}
+
+#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize, LightDiscriminator)]
+pub struct NullifierAccount {}
+
+#[error_code]
+pub enum ErrorCode {
+ #[msg("Not enough keys in remaining accounts")]
+ AccountNotEnoughKeys,
+}
+```
diff --git a/snippets/code-snippets/zk/nullifier/rust-client.mdx b/snippets/code-snippets/zk/nullifier/rust-client.mdx
new file mode 100644
index 0000000..0df5353
--- /dev/null
+++ b/snippets/code-snippets/zk/nullifier/rust-client.mdx
@@ -0,0 +1,441 @@
+```rust expandable
+use anchor_lang::{InstructionData, ToAccountMetas};
+use circom_prover::{prover::ProofLib, witness::WitnessFn, CircomProver};
+use groth16_solana::proof_parser::circom_prover::{convert_proof, convert_proof_to_compressed};
+use light_hasher::{Hasher, Poseidon};
+use light_program_test::{
+ program_test::LightProgramTest, utils::simulate_cu, AddressWithTree, Indexer, ProgramTestConfig,
+ Rpc, RpcError,
+};
+use light_sdk::{
+ address::v2::derive_address,
+ instruction::{PackedAccounts, SystemAccountMetaConfig},
+};
+use num_bigint::BigUint;
+use solana_sdk::{
+ instruction::Instruction,
+ pubkey::Pubkey,
+ signature::{Keypair, Signer},
+};
+use std::collections::HashMap;
+use zk_nullifier::{BATCH_SIZE, NULLIFIER_PREFIX};
+
+#[link(name = "circuit_single", kind = "static")]
+extern "C" {}
+
+#[link(name = "circuit_batch", kind = "static")]
+extern "C" {}
+
+rust_witness::witness!(nullifier1);
+rust_witness::witness!(nullifier4);
+
+// ============================================================================
+// Shared helpers
+// ============================================================================
+
+fn generate_random_secret() -> [u8; 32] {
+ let random_keypair = Keypair::new();
+ let mut secret = [0u8; 32];
+ secret[1..32].copy_from_slice(&random_keypair.to_bytes()[0..31]);
+ secret
+}
+
+fn compute_nullifier(verification_id: &[u8; 32], secret: &[u8; 32]) -> [u8; 32] {
+ Poseidon::hashv(&[verification_id, secret]).unwrap()
+}
+
+fn compress_proof(
+ proof: &circom_prover::prover::circom::Proof,
+) -> light_compressed_account::instruction_data::compressed_proof::CompressedProof {
+ let (proof_a_uncompressed, proof_b_uncompressed, proof_c_uncompressed) =
+ convert_proof(proof).expect("Failed to convert proof");
+
+ let (proof_a, proof_b, proof_c) = convert_proof_to_compressed(
+ &proof_a_uncompressed,
+ &proof_b_uncompressed,
+ &proof_c_uncompressed,
+ )
+ .expect("Failed to compress proof");
+
+ light_compressed_account::instruction_data::compressed_proof::CompressedProof {
+ a: proof_a,
+ b: proof_b,
+ c: proof_c,
+ }
+}
+
+// ============================================================================
+// Single nullifier test
+// ============================================================================
+
+#[tokio::test]
+async fn test_create_nullifier() {
+ let config = ProgramTestConfig::new(true, Some(vec![("zk_nullifier", zk_nullifier::ID)]));
+ let mut rpc = LightProgramTest::new(config).await.unwrap();
+ let payer = rpc.get_payer().insecure_clone();
+
+ let address_tree_info = rpc.get_address_tree_v2();
+
+ let secret = generate_random_secret();
+ let verification_id = Pubkey::new_unique().to_bytes();
+ let nullifier = compute_nullifier(&verification_id, &secret);
+
+ let (nullifier_address, _) = derive_address(
+ &[
+ NULLIFIER_PREFIX,
+ nullifier.as_slice(),
+ verification_id.as_slice(),
+ ],
+ &address_tree_info.tree,
+ &zk_nullifier::ID,
+ );
+
+ let instruction = build_create_nullifier_instruction(
+ &mut rpc,
+ &payer,
+ &nullifier_address,
+ address_tree_info.clone(),
+ &verification_id,
+ &nullifier,
+ &secret,
+ )
+ .await
+ .unwrap();
+
+ let cu = simulate_cu(&mut rpc, &payer, &instruction).await;
+ println!("=== Single nullifier CU: {} ===", cu);
+
+ rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[&payer])
+ .await
+ .unwrap();
+
+ let nullifier_accounts = rpc
+ .get_compressed_accounts_by_owner(&zk_nullifier::ID, None, None)
+ .await
+ .unwrap();
+ assert_eq!(nullifier_accounts.value.items.len(), 1);
+
+ // Duplicate should fail
+ let dup_instruction = build_create_nullifier_instruction(
+ &mut rpc,
+ &payer,
+ &nullifier_address,
+ address_tree_info,
+ &verification_id,
+ &nullifier,
+ &secret,
+ )
+ .await
+ .unwrap();
+
+ let result = rpc
+ .create_and_send_transaction(&[dup_instruction], &payer.pubkey(), &[&payer])
+ .await;
+ assert!(result.is_err());
+}
+
+async fn build_create_nullifier_instruction<R>(
+ rpc: &mut R,
+ payer: &Keypair,
+ address: &[u8; 32],
+ address_tree_info: light_client::indexer::TreeInfo,
+ verification_id: &[u8; 32],
+ nullifier: &[u8; 32],
+ secret: &[u8; 32],
+) -> Result<Instruction, RpcError>
+where
+ R: Rpc + Indexer,
+{
+ let mut remaining_accounts = PackedAccounts::default();
+ remaining_accounts.add_pre_accounts_signer(payer.pubkey());
+ let config = SystemAccountMetaConfig::new(zk_nullifier::ID);
+ remaining_accounts.add_system_accounts_v2(config)?;
+
+ let rpc_result = rpc
+ .get_validity_proof(
+ vec![],
+ vec![AddressWithTree {
+ address: *address,
+ tree: address_tree_info.tree,
+ }],
+ None,
+ )
+ .await?
+ .value;
+
+ let packed_address_tree_accounts = rpc_result
+ .pack_tree_infos(&mut remaining_accounts)
+ .address_trees;
+
+ let output_state_tree_index = rpc
+ .get_random_state_tree_info()?
+ .pack_output_tree_index(&mut remaining_accounts)?;
+
+ let zk_proof = generate_single_zk_proof(verification_id, nullifier, secret);
+
+ let (remaining_accounts_metas, system_accounts_offset, _) = remaining_accounts.to_account_metas();
+
+ let instruction_data = zk_nullifier::instruction::CreateNullifier {
+ proof: rpc_result.proof,
+ address_tree_info: packed_address_tree_accounts[0],
+ output_state_tree_index,
+ system_accounts_offset: system_accounts_offset as u8,
+ zk_proof,
+ verification_id: *verification_id,
+ nullifier: *nullifier,
+ };
+
+ let accounts = zk_nullifier::accounts::CreateNullifierAccounts {
+ signer: payer.pubkey(),
+ };
+
+ Ok(Instruction {
+ program_id: zk_nullifier::ID,
+ accounts: [
+ accounts.to_account_metas(None),
+ remaining_accounts_metas,
+ ]
+ .concat(),
+ data: instruction_data.data(),
+ })
+}
+
+fn generate_single_zk_proof(
+ verification_id: &[u8; 32],
+ nullifier: &[u8; 32],
+ secret: &[u8; 32],
+) -> light_compressed_account::instruction_data::compressed_proof::CompressedProof {
+ let manifest_dir = env!("CARGO_MANIFEST_DIR");
+ let zkey_path = format!("{}/../../build/nullifier_1_final.zkey", manifest_dir);
+
+ let mut proof_inputs = HashMap::new();
+ proof_inputs.insert(
+ "verification_id".to_string(),
+ vec![BigUint::from_bytes_be(verification_id).to_string()],
+ );
+ proof_inputs.insert(
+ "nullifier".to_string(),
+ vec![BigUint::from_bytes_be(nullifier).to_string()],
+ );
+ proof_inputs.insert(
+ "secret".to_string(),
+ vec![BigUint::from_bytes_be(secret).to_string()],
+ );
+
+ let circuit_inputs = serde_json::to_string(&proof_inputs).unwrap();
+ let proof = CircomProver::prove(
+ ProofLib::Arkworks,
+ WitnessFn::RustWitness(nullifier1_witness),
+ circuit_inputs,
+ zkey_path.clone(),
+ )
+ .expect("Proof generation failed");
+
+ let is_valid = CircomProver::verify(ProofLib::Arkworks, proof.clone(), zkey_path)
+ .expect("Proof verification failed");
+ assert!(is_valid);
+
+ compress_proof(&proof.proof)
+}
+
+// ============================================================================
+// Batch nullifier test
+// ============================================================================
+
+#[tokio::test]
+async fn test_create_batch_nullifier() {
+ let config = ProgramTestConfig::new(true, Some(vec![("zk_nullifier", zk_nullifier::ID)]));
+ let mut rpc = LightProgramTest::new(config).await.unwrap();
+ let payer = rpc.get_payer().insecure_clone();
+
+ let address_tree_info = rpc.get_address_tree_v2();
+
+ let secrets: [[u8; 32]; BATCH_SIZE] = [
+ generate_random_secret(),
+ generate_random_secret(),
+ generate_random_secret(),
+ generate_random_secret(),
+ ];
+ let verification_id = Pubkey::new_unique().to_bytes();
+ let nullifiers: [[u8; 32]; BATCH_SIZE] = [
+ compute_nullifier(&verification_id, &secrets[0]),
+ compute_nullifier(&verification_id, &secrets[1]),
+ compute_nullifier(&verification_id, &secrets[2]),
+ compute_nullifier(&verification_id, &secrets[3]),
+ ];
+
+ let mut addresses = Vec::with_capacity(BATCH_SIZE);
+ for i in 0..BATCH_SIZE {
+ let (addr, _) = derive_address(
+ &[
+ NULLIFIER_PREFIX,
+ nullifiers[i].as_slice(),
+ verification_id.as_slice(),
+ ],
+ &address_tree_info.tree,
+ &zk_nullifier::ID,
+ );
+ addresses.push(addr);
+ }
+
+ let instruction = build_create_batch_nullifier_instruction(
+ &mut rpc,
+ &payer,
+ &addresses,
+ address_tree_info.clone(),
+ &verification_id,
+ &nullifiers,
+ &secrets,
+ )
+ .await
+ .unwrap();
+
+ let cu = simulate_cu(&mut rpc, &payer, &instruction).await;
+ println!("=== Batch (4 nullifiers) CU: {} ===", cu);
+ println!("=== CU per nullifier (batch): {} ===", cu / 4);
+
+ rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[&payer])
+ .await
+ .unwrap();
+
+ let nullifier_accounts = rpc
+ .get_compressed_accounts_by_owner(&zk_nullifier::ID, None, None)
+ .await
+ .unwrap();
+ assert_eq!(nullifier_accounts.value.items.len(), BATCH_SIZE);
+
+ // Duplicate batch should fail
+ let dup_instruction = build_create_batch_nullifier_instruction(
+ &mut rpc,
+ &payer,
+ &addresses,
+ address_tree_info,
+ &verification_id,
+ &nullifiers,
+ &secrets,
+ )
+ .await
+ .unwrap();
+
+ let result = rpc
+ .create_and_send_transaction(&[dup_instruction], &payer.pubkey(), &[&payer])
+ .await;
+ assert!(result.is_err());
+}
+
+async fn build_create_batch_nullifier_instruction(
+ rpc: &mut R,
+ payer: &Keypair,
+ addresses: &[[u8; 32]],
+ address_tree_info: light_client::indexer::TreeInfo,
+ verification_id: &[u8; 32],
+ nullifiers: &[[u8; 32]; BATCH_SIZE],
+ secrets: &[[u8; 32]; BATCH_SIZE],
+) -> Result
+where
+ R: Rpc + Indexer,
+{
+ let mut remaining_accounts = PackedAccounts::default();
+ remaining_accounts.add_pre_accounts_signer(payer.pubkey());
+ let config = SystemAccountMetaConfig::new(zk_nullifier::ID);
+ remaining_accounts.add_system_accounts_v2(config)?;
+
+ let address_with_trees: Vec = addresses
+ .iter()
+ .map(|addr| AddressWithTree {
+ address: *addr,
+ tree: address_tree_info.tree,
+ })
+ .collect();
+
+ let rpc_result = rpc
+ .get_validity_proof(vec![], address_with_trees, None)
+ .await?
+ .value;
+
+ let packed_address_tree_accounts = rpc_result
+ .pack_tree_infos(&mut remaining_accounts)
+ .address_trees;
+
+ let output_state_tree_index = rpc
+ .get_random_state_tree_info()?
+ .pack_output_tree_index(&mut remaining_accounts)?;
+
+ let zk_proof = generate_batch_zk_proof(verification_id, nullifiers, secrets);
+
+ let address_tree_infos: [_; BATCH_SIZE] = [
+ packed_address_tree_accounts[0],
+ packed_address_tree_accounts[1],
+ packed_address_tree_accounts[2],
+ packed_address_tree_accounts[3],
+ ];
+
+ let (remaining_accounts_metas, system_accounts_offset, _) = remaining_accounts.to_account_metas();
+
+ let instruction_data = zk_nullifier::instruction::CreateBatchNullifier {
+ proof: rpc_result.proof,
+ address_tree_infos,
+ output_state_tree_index,
+ system_accounts_offset: system_accounts_offset as u8,
+ zk_proof,
+ verification_id: *verification_id,
+ nullifiers: *nullifiers,
+ };
+
+ let accounts = zk_nullifier::accounts::CreateNullifierAccounts {
+ signer: payer.pubkey(),
+ };
+
+ Ok(Instruction {
+ program_id: zk_nullifier::ID,
+ accounts: [
+ accounts.to_account_metas(None),
+ remaining_accounts_metas,
+ ]
+ .concat(),
+ data: instruction_data.data(),
+ })
+}
+
+fn generate_batch_zk_proof(
+ verification_id: &[u8; 32],
+ nullifiers: &[[u8; 32]; BATCH_SIZE],
+ secrets: &[[u8; 32]; BATCH_SIZE],
+) -> light_compressed_account::instruction_data::compressed_proof::CompressedProof {
+ let manifest_dir = env!("CARGO_MANIFEST_DIR");
+ let zkey_path = format!("{}/../../build/nullifier_4_final.zkey", manifest_dir);
+
+ let mut proof_inputs = HashMap::new();
+ proof_inputs.insert(
+ "verification_id".to_string(),
+ vec![BigUint::from_bytes_be(verification_id).to_string()],
+ );
+
+ let nullifier_strings: Vec = nullifiers
+ .iter()
+ .map(|n| BigUint::from_bytes_be(n).to_string())
+ .collect();
+ proof_inputs.insert("nullifier".to_string(), nullifier_strings);
+
+ let secret_strings: Vec = secrets
+ .iter()
+ .map(|s| BigUint::from_bytes_be(s).to_string())
+ .collect();
+ proof_inputs.insert("secret".to_string(), secret_strings);
+
+ let circuit_inputs = serde_json::to_string(&proof_inputs).unwrap();
+ let proof = CircomProver::prove(
+ ProofLib::Arkworks,
+ WitnessFn::RustWitness(nullifier4_witness),
+ circuit_inputs,
+ zkey_path.clone(),
+ )
+ .expect("Proof generation failed");
+
+ let is_valid = CircomProver::verify(ProofLib::Arkworks, proof.clone(), zkey_path)
+ .expect("Proof verification failed");
+ assert!(is_valid);
+
+ compress_proof(&proof.proof)
+}
+```
diff --git a/snippets/code-snippets/zk/nullifier/typescript-client.mdx b/snippets/code-snippets/zk/nullifier/typescript-client.mdx
new file mode 100644
index 0000000..e066380
--- /dev/null
+++ b/snippets/code-snippets/zk/nullifier/typescript-client.mdx
@@ -0,0 +1,312 @@
+```typescript expandable
+import { web3, Program, AnchorProvider, setProvider } from "@coral-xyz/anchor";
+import {
+ bn,
+ createRpc,
+ deriveAddressSeedV2,
+ deriveAddressV2,
+ batchAddressTree,
+ PackedAccounts,
+ Rpc,
+ sleep,
+ SystemAccountMetaConfig,
+ defaultTestStateTreeAccounts,
+ featureFlags,
+ VERSION,
+ confirmTx,
+} from "@lightprotocol/stateless.js";
+import { buildPoseidonOpt } from "circomlibjs";
+import * as snarkjs from "snarkjs";
+import * as assert from "assert";
+import * as path from "path";
+import * as fs from "fs";
+
+import {
+ parseProofToCompressed,
+ bigintToBytes32,
+ toFieldString,
+ generateFieldElement,
+} from "./utils/proof-helpers";
+
+// Force V2 mode
+(featureFlags as any).version = VERSION.V2;
+
+// Load IDL
+const IDL = JSON.parse(
+ fs.readFileSync(path.join(process.cwd(), "target/idl/zk_nullifier.json"), "utf8")
+);
+
+const PROGRAM_ID = new web3.PublicKey(IDL.address);
+const NULLIFIER_PREFIX = Buffer.from("nullifier");
+
+// Paths to circuit artifacts
+const BUILD_DIR = path.join(process.cwd(), "build");
+const WASM_PATH_SINGLE = path.join(BUILD_DIR, "nullifier_1_js/nullifier_1.wasm");
+const ZKEY_PATH_SINGLE = path.join(BUILD_DIR, "nullifier_1_final.zkey");
+const VKEY_PATH_SINGLE = path.join(BUILD_DIR, "nullifier_1_verification_key.json");
+const WASM_PATH_BATCH = path.join(BUILD_DIR, "nullifier_4_js/nullifier_4.wasm");
+const ZKEY_PATH_BATCH = path.join(BUILD_DIR, "nullifier_4_final.zkey");
+
+describe("zk-nullifier", () => {
+ let rpc: Rpc;
+ let signer: web3.Keypair;
+ let poseidon: any;
+ let program: Program;
+
+ before(async () => {
+ rpc = createRpc(
+ "http://127.0.0.1:8899",
+ "http://127.0.0.1:8784",
+ "http://127.0.0.1:3001",
+ { commitment: "confirmed" }
+ );
+
+ signer = web3.Keypair.generate();
+ await rpc.requestAirdrop(signer.publicKey, web3.LAMPORTS_PER_SOL);
+ await sleep(2000);
+
+ poseidon = await buildPoseidonOpt();
+
+ // Setup Anchor provider and program
+ const connection = new web3.Connection("http://127.0.0.1:8899", "confirmed");
+ const wallet = {
+ publicKey: signer.publicKey,
+ signTransaction: async (tx: web3.Transaction) => {
+ tx.sign(signer);
+ return tx;
+ },
+ signAllTransactions: async (txs: web3.Transaction[]) => {
+ txs.forEach((tx) => tx.sign(signer));
+ return txs;
+ },
+ };
+ const provider = new AnchorProvider(connection, wallet as any, { commitment: "confirmed" });
+ setProvider(provider);
+ program = new Program(IDL, provider);
+ });
+
+ after(async () => {
+ // Terminate snarkjs curve worker to allow clean exit
+ // @ts-ignore
+ if (globalThis.curve_bn128) {
+ // @ts-ignore
+ await globalThis.curve_bn128.terminate();
+ }
+ });
+
+ /** Compute nullifier = Poseidon(verification_id, secret) */
+ function computeNullifier(verificationId: Uint8Array, secret: Uint8Array): Uint8Array {
+ const hash = poseidon([toFieldString(verificationId), toFieldString(secret)].map(BigInt));
+ return bigintToBytes32(poseidon.F.toObject(hash));
+ }
+
+ /** Generate Groth16 proof for single nullifier */
+ async function generateProof(
+ verificationId: Uint8Array,
+ nullifier: Uint8Array,
+ secret: Uint8Array
+ ): Promise<{ a: number[]; b: number[]; c: number[] }> {
+ const inputs = {
+ verification_id: toFieldString(verificationId),
+ nullifier: toFieldString(nullifier),
+ secret: toFieldString(secret),
+ };
+
+ const { proof, publicSignals } = await snarkjs.groth16.fullProve(inputs, WASM_PATH_SINGLE, ZKEY_PATH_SINGLE);
+
+ // Verify locally with snarkjs before converting
+ const vkey = JSON.parse(fs.readFileSync(VKEY_PATH_SINGLE, "utf8"));
+ const isValid = await snarkjs.groth16.verify(vkey, publicSignals, proof);
+ console.log("Local snarkjs verification:", isValid);
+ console.log("Public signals:", publicSignals);
+
+ // Use prover.js logic for proof conversion
+ const compressed = parseProofToCompressed(proof);
+
+ console.log("Compressed proof a (first 8 bytes):", compressed.a.slice(0, 8));
+ console.log("Compressed proof b (first 8 bytes):", compressed.b.slice(0, 8));
+ console.log("Compressed proof c (first 8 bytes):", compressed.c.slice(0, 8));
+
+ return compressed;
+ }
+
+ /** Generate Groth16 proof for batch (4) nullifiers */
+ async function generateBatchProof(
+ verificationId: Uint8Array,
+ nullifiers: Uint8Array[],
+ secrets: Uint8Array[]
+ ): Promise<{ a: number[]; b: number[]; c: number[] }> {
+ const inputs = {
+ verification_id: toFieldString(verificationId),
+ nullifier: nullifiers.map(toFieldString),
+ secret: secrets.map(toFieldString),
+ };
+
+ const { proof } = await snarkjs.groth16.fullProve(inputs, WASM_PATH_BATCH, ZKEY_PATH_BATCH);
+ return parseProofToCompressed(proof);
+ }
+
+ /** Build create_nullifier instruction using Anchor */
+ async function buildCreateNullifierInstruction(
+ verificationId: Uint8Array,
+ nullifier: Uint8Array,
+ secret: Uint8Array
+ ): Promise {
+ const addressTree = new web3.PublicKey(batchAddressTree);
+ const outputStateTree = defaultTestStateTreeAccounts().merkleTree;
+
+ const seed = deriveAddressSeedV2([NULLIFIER_PREFIX, nullifier, verificationId]);
+ const address = deriveAddressV2(seed, addressTree, PROGRAM_ID);
+
+ const proofResult = await rpc.getValidityProofV0(
+ [],
+ [{ tree: addressTree, queue: addressTree, address: bn(address.toBytes()) }]
+ );
+
+ // Use V2 accounts layout (matches on-chain CpiAccounts::new from light_sdk::cpi::v2)
+ const remainingAccounts = new PackedAccounts();
+ remainingAccounts.addPreAccountsSigner(signer.publicKey);
+ remainingAccounts.addSystemAccountsV2(SystemAccountMetaConfig.new(PROGRAM_ID));
+
+ const addressMerkleTreeIndex = remainingAccounts.insertOrGet(addressTree);
+ const outputStateTreeIndex = remainingAccounts.insertOrGet(outputStateTree);
+
+ const zkProof = await generateProof(verificationId, nullifier, secret);
+
+ // Get system_accounts_offset from packed accounts
+ const { remainingAccounts: accountMetas, systemStart } = remainingAccounts.toAccountMetas();
+
+ // Use Anchor to build instruction
+ // ValidityProof is a struct with an unnamed Option field
+ // Anchor JS client uses index-based access for unnamed tuple/option fields
+ const proof = {
+ 0: proofResult.compressedProof,
+ };
+
+ const ix = await program.methods
+ .createNullifier(
+ // proof (ValidityProof = struct with Option)
+ proof,
+ // address_tree_info (PackedAddressTreeInfo)
+ {
+ addressMerkleTreePubkeyIndex: addressMerkleTreeIndex,
+ addressQueuePubkeyIndex: addressMerkleTreeIndex,
+ rootIndex: proofResult.rootIndices[0],
+ },
+ // output_state_tree_index
+ outputStateTreeIndex,
+ // system_accounts_offset
+ systemStart,
+ // zk_proof (CompressedProof)
+ {
+ a: zkProof.a,
+ b: zkProof.b,
+ c: zkProof.c,
+ },
+ // verification_id
+ Array.from(verificationId),
+ // nullifier
+ Array.from(nullifier)
+ )
+ .accounts({
+ signer: signer.publicKey,
+ })
+ .remainingAccounts(accountMetas)
+ .instruction();
+
+ return ix;
+ }
+
+ describe("Single nullifier", () => {
+ it("should create a nullifier with valid ZK proof", async () => {
+ // Use generateFieldElement for verificationId to ensure it's in BN254 field
+ const verificationId = generateFieldElement();
+ const secret = generateFieldElement();
+ const nullifier = computeNullifier(verificationId, secret);
+
+ console.log("Verification ID:", Buffer.from(verificationId).toString("hex").slice(0, 16) + "...");
+ console.log("Nullifier:", Buffer.from(nullifier).toString("hex").slice(0, 16) + "...");
+
+ // Debug: Check if values are within BN254 field
+ const BN254_FR = BigInt('21888242871839275222246405745257275088548364400416034343698204186575808495617');
+ const verIdBigInt = BigInt("0x" + Buffer.from(verificationId).toString("hex"));
+ const nullifierBigInt = BigInt("0x" + Buffer.from(nullifier).toString("hex"));
+ console.log("verificationId < Fr:", verIdBigInt < BN254_FR, "value:", verIdBigInt.toString().slice(0, 20) + "...");
+ console.log("nullifier < Fr:", nullifierBigInt < BN254_FR, "value:", nullifierBigInt.toString().slice(0, 20) + "...");
+
+ const ix = await buildCreateNullifierInstruction(verificationId, nullifier, secret);
+ const computeIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ units: 400_000 });
+
+ const tx = new web3.Transaction().add(computeIx, ix);
+ tx.recentBlockhash = (await rpc.getLatestBlockhash()).blockhash;
+ tx.feePayer = signer.publicKey;
+ tx.sign(signer);
+
+ const sig = await rpc.sendTransaction(tx, [signer]);
+ await confirmTx(rpc, sig);
+
+ console.log("Transaction signature:", sig);
+
+ const slot = await rpc.getSlot();
+ await rpc.confirmTransactionIndexed(slot);
+
+ const accounts = await rpc.getCompressedAccountsByOwner(PROGRAM_ID);
+ assert.ok(accounts.items.length > 0, "Nullifier account should be created");
+ console.log("Created nullifier accounts:", accounts.items.length);
+ });
+
+ it("should reject duplicate nullifier", async () => {
+ // Use generateFieldElement for verificationId to ensure it's in BN254 field
+ const verificationId = generateFieldElement();
+ const secret = generateFieldElement();
+ const nullifier = computeNullifier(verificationId, secret);
+
+ const ix1 = await buildCreateNullifierInstruction(verificationId, nullifier, secret);
+ const computeIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ units: 400_000 });
+
+ const tx1 = new web3.Transaction().add(computeIx, ix1);
+ tx1.recentBlockhash = (await rpc.getLatestBlockhash()).blockhash;
+ tx1.feePayer = signer.publicKey;
+ tx1.sign(signer);
+
+ await rpc.sendTransaction(tx1, [signer]);
+ await sleep(2000);
+
+ // Attempt to create duplicate - should fail when getting validity proof
+ // because the address already exists in the tree
+ try {
+ await buildCreateNullifierInstruction(verificationId, nullifier, secret);
+ assert.fail("Should have rejected duplicate nullifier");
+ } catch (err: any) {
+ // The error should indicate the address already exists
+ assert.ok(
+ err.message.includes("already exists"),
+ `Expected 'already exists' error, got: ${err.message}`
+ );
+ console.log("Duplicate correctly rejected:", err.message);
+ }
+ });
+ });
+
+ describe("Batch nullifier (4x)", () => {
+ it("should create 4 nullifiers with single proof", async () => {
+ // Use generateFieldElement for verificationId to ensure it's in BN254 field
+ const verificationId = generateFieldElement();
+ const secrets = Array.from({ length: 4 }, generateFieldElement);
+ const nullifiers = secrets.map((s) => computeNullifier(verificationId, s));
+
+ console.log("Creating batch of 4 nullifiers...");
+ console.log("Verification ID:", Buffer.from(verificationId).toString("hex").slice(0, 16) + "...");
+
+ const zkProof = await generateBatchProof(verificationId, nullifiers, secrets);
+ console.log("Batch proof generated");
+
+ assert.ok(zkProof.a.length === 32, "Proof A should be 32 bytes");
+ assert.ok(zkProof.b.length === 64, "Proof B should be 64 bytes");
+ assert.ok(zkProof.c.length === 32, "Proof C should be 32 bytes");
+
+ console.log("Batch proof verified locally");
+ });
+ });
+});
+```
diff --git a/snippets/mermaid/nullifier-flow.mdx b/snippets/mermaid/nullifier-flow.mdx
new file mode 100644
index 0000000..fdf228b
--- /dev/null
+++ b/snippets/mermaid/nullifier-flow.mdx
@@ -0,0 +1,18 @@
+```mermaid
+sequenceDiagram
+ participant U as User
+ participant C as Client
+ participant P as Program
+ participant L as Light Protocol
+
+ U->>C: secret + verification_id
+ C->>C: nullifier = Poseidon(vid, secret)
+ C->>C: proof = Groth16.prove(...)
+ C->>P: create_nullifier(proof, vid, nullifier)
+ P->>P: Groth16.verify(proof)
+ P->>L: derive_address(nullifier, vid)
+ L-->>P: address
+ P->>L: create_account(address)
+ L-->>P: success/fail
+ P-->>U: tx result
+```
diff --git a/snippets/overview-tables/zk-examples-table.mdx b/snippets/overview-tables/zk-examples-table.mdx
new file mode 100644
index 0000000..c599b00
--- /dev/null
+++ b/snippets/overview-tables/zk-examples-table.mdx
@@ -0,0 +1,4 @@
+| | Description |
+|:--------|:------------|
+| [ZK-ID](https://github.com/Lightprotocol/program-examples/tree/main/zk/zk-id) | Identity verification using Groth16 proofs. Issuers create credentials; users prove ownership without revealing the credential. |
+| [Nullifier](https://github.com/Lightprotocol/program-examples/tree/main/zk/zk-nullifier) | A simple program to create nullifiers. Requires no custom circuit. |
diff --git a/welcome.mdx b/welcome.mdx
index 9f0a4d6..16f17ef 100644
--- a/welcome.mdx
+++ b/welcome.mdx
@@ -24,7 +24,7 @@ import WelcomePageInstall from "/snippets/setup/welcome-page-install.mdx";
For App State.
diff --git a/zk/compressed-account-zk.mdx b/zk/compressed-account-zk.mdx
new file mode 100644
index 0000000..b26dd83
--- /dev/null
+++ b/zk/compressed-account-zk.mdx
@@ -0,0 +1,507 @@
+---
+title: Compressed Accounts for ZK Applications
+description: ZK applications use Poseidon Hashes instead of SHA-256 to store state in compressed accounts compatible with zero-knowledge circuits.
+keywords: ["compressed accounts zk", "poseidon hashing solana", "groth16 solana", "privacy solana"]
+---
+
+import ZkAppPrompt from "/snippets/ai-prompts/zk-app.mdx";
+import MerkleProofCircuit from '/snippets/code-snippets/zk/merkle-proof/circuit.mdx';
+import MerkleProofProgram from '/snippets/code-snippets/zk/merkle-proof/program.mdx';
+import MerkleProofRustClient from '/snippets/code-snippets/zk/merkle-proof/rust-client.mdx';
+import MerkleProofTypescriptClient from '/snippets/code-snippets/zk/merkle-proof/typescript-client.mdx';
+
+* Compressed and regular Solana accounts share the same functionality and are fully composable.
+* The account state is hashed with Poseidon and stored as a commitment in the state Merkle tree.
+* RPCs that support ZK Compression (Helius, Triton) index state changes.
+
+
+Each tab below includes a full code example. See the full implementation at [program-examples/zk/zk-merkle-proof](https://github.com/Lightprotocol/program-examples/tree/main/zk/zk-merkle-proof).
+
+
+## Implementation guide
+
+
+
+
+
+
+### Dependencies
+
+Add dependencies to your program.
+
+```toml
+[dependencies]
+anchor-lang = "0.31.1"
+borsh = "0.10.4"
+light-sdk = { version = "0.17.1", features = ["anchor", "poseidon", "merkle-tree", "v2"] }
+light-hasher = "5.0.0"
+light-sdk-types = { version = "0.17.1", features = ["v2"] }
+```
+
+* `light-sdk` with `poseidon` feature enables ZK-friendly hashing.
+* `light-hasher` provides Poseidon hash functions.
+
+
+
+
+
+### Constants
+
+Set program address, CPI signer, and include the verifying key from your circuit setup.
+
+```rust
+declare_id!("MPzkYomvQc4VQPwMr6bFduyWRQZVCh5CofgDC4dFqJp");
+
+pub const LIGHT_CPI_SIGNER: CpiSigner =
+ derive_light_cpi_signer!("MPzkYomvQc4VQPwMr6bFduyWRQZVCh5CofgDC4dFqJp");
+
+pub const ZK_ACCOUNT: &[u8] = b"zk_account";
+
+// Generated from circom trusted setup (.zkey file)
+pub mod verifying_key;
+```
+
+
+
+
+
+### Compressed account
+
+
+```rust
+#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize, LightDiscriminator, LightHasher)]
+pub struct ZkAccount {
+ pub data_hash: ZkDataHash,
+}
+```
+
+**ZK-specific differences from regular compressed accounts:**
+
+* Derive `LightHasher` for Poseidon hashing (instead of default SHA-256).
+
+**Custom types need `ToByteArray`:**
+
+```rust
+#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize)]
+pub struct ZkDataHash(pub [u8; 32]);
+
+impl ToByteArray for ZkDataHash {
+ const NUM_FIELDS: usize = 1;
+ fn to_byte_array(&self) -> std::result::Result<[u8; 32], HasherError> {
+ Ok(self.0)
+ }
+}
+```
+
+
+
+
+
+### Instruction data
+
+Define the instruction parameters.
+
+```rust
+pub fn create_account<'info>(
+ ctx: Context<'_, '_, '_, 'info, CreateAccountAccounts<'info>>,
+ proof: ValidityProof,
+ address_tree_info: PackedAddressTreeInfo,
+ output_state_tree_index: u8,
+ data_hash: [u8; 32],
+) -> Result<()>
+```
+
+1. `proof`: Proves the address does not exist yet.
+2. `address_tree_info`: References the address tree for deriving the address.
+3. `output_state_tree_index`: References the state tree for storing the account hash.
+4. `data_hash`: The Poseidon hash of the data to store.
+
+
+
+
+
+### Derive address
+
+Derive the address as a persistent unique identifier.
+
+```rust
+let address_tree_pubkey = address_tree_info
+ .get_tree_pubkey(&light_cpi_accounts)
+ .map_err(|_| ProgramError::InvalidAccountData)?;
+
+let (address, address_seed) = derive_address(
+ &[ZK_ACCOUNT, &data_hash],
+ &address_tree_pubkey,
+ &crate::ID,
+);
+```
+
+
+
+
+
+### Address tree check
+
+Ensure global uniqueness by verifying the address tree pubkey.
+
+```rust
+if address_tree_pubkey.to_bytes() != light_sdk::constants::ADDRESS_TREE_V2 {
+ msg!("Invalid address tree");
+ return Err(ProgramError::InvalidAccountData.into());
+}
+```
+
+
+
+
+
+### Initialize compressed account
+
+
+```rust
+let mut account = LightAccountPoseidon::::new_init(
+ &crate::ID,
+ Some(address),
+ output_state_tree_index,
+);
+
+account.data_hash = ZkDataHash(data_hash);
+```
+
+
+
+
+
+
+### Light System Program CPI
+
+
+```rust
+let light_cpi_accounts = CpiAccounts::new(
+ ctx.accounts.signer.as_ref(),
+ ctx.remaining_accounts,
+ crate::LIGHT_CPI_SIGNER,
+);
+
+LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof)
+ .with_light_account_poseidon(account)?
+ .with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)])
+ .invoke(light_cpi_accounts)?;
+```
+
+
+
+## Full code example
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ZK clients follow the same pattern as [regular compressed account clients](/client-library/client-guide), with additional steps for generating Groth16 proofs.
+
+## Key points
+
+1. **Derive a new address** or **fetch compressed account** for on-chain verification.
+2. **Fetch validity proof** from the RPC that verifies a new address does not exist in the address tree.
+3. **Pack accounts** with the SDK's helper. Instructions require Light System Program and Merkle tree accounts. `PackedAccounts` converts their pubkeys to `u8` indices pointing to accounts in the instruction.
+4. **Build the instruction** with the current account data, new data, packed accounts and validity proof.
+5. **Generate ZK proof** with snarkjs to generate a Groth16 proof client-side that verifies account state without revealing the Merkle path.
+6. **Build the instruction** with the proof, packed accounts, and account data.
+
+## Get started
+
+
+
+
+### Setup
+
+
+
+
+#### 1. Installation
+
+```bash
+npm install \
+ @lightprotocol/stateless.js@0.22.1-alpha.1 \
+ @solana/web3.js \
+ snarkjs \
+ circomlibjs \
+ @noble/hashes
+```
+
+| ZK Dependencies | Purpose |
+|---------|---------|
+| `snarkjs` | Groth16 proof generation |
+| `circomlibjs` | Poseidon hasher for computing account hashes |
+| `@noble/hashes` | Keccak-256 for BN254 field hashing |
+
+#### 2. Build circuit
+
+```bash
+# Compile circuit
+circom circuits/merkle_proof.circom --r1cs --wasm --sym -o build
+
+# Trusted setup
+snarkjs groth16 setup build/merkle_proof.r1cs pot12_final.ptau build/merkle_proof_0000.zkey
+snarkjs zkey contribute build/merkle_proof_0000.zkey build/merkle_proof_final.zkey
+```
+
+| File | Purpose |
+|------|---------|
+| `merkle_proof.wasm` | Compiled circuit for witness generation |
+| `merkle_proof_final.zkey` | Proving key from trusted setup |
+
+
+
+
+
+
+
+
+### ZK proof
+
+For ZK verification, generate the proof client-side instead of fetching from RPC.
+
+**Step 1: Fetch Merkle proof from indexer**
+
+```typescript
+const account = await rpc.getCompressedAccount(bn(address.toBytes()));
+const merkleProofs = await rpc.getMultipleCompressedAccountProofs([account.hash]);
+```
+
+**Step 2: Hash inputs to BN254 field**
+
+```typescript
+function hashToBn254Field(data: Uint8Array): Uint8Array {
+ const hash = keccak_256(data);
+ hash[0] = hash[0] & 0x1f; // < 2^254
+ return hash;
+}
+
+const ownerHashed = hashToBn254Field(programId.toBytes());
+const merkleTreeHashed = hashToBn254Field(merkleTreePubkey.toBytes());
+```
+
+**Step 3: Compute account hash (Poseidon)**
+
+```typescript
+const LAMPORTS_OFFSET = 36893488147419103232n;
+const accountHash = poseidon([
+ ownerHashed,
+ leafIndex,
+ merkleTreeHashed,
+ address,
+ discriminator + LAMPORTS_OFFSET,
+ dataHash
+]);
+```
+
+**Step 4: Prepare circuit inputs**
+
+```typescript
+const inputs = {
+ // Public inputs (verified on-chain)
+ owner_hashed: toFieldString(ownerHashed),
+ merkle_tree_hashed: toFieldString(merkleTreeHashed),
+ discriminator: toFieldString(discriminator),
+ data_hash: toFieldString(dataHash),
+ expectedRoot: toFieldString(merkleRoot),
+
+ // Private inputs (hidden in proof)
+ leaf_index: leafIndex.toString(),
+ account_leaf_index: accountLeafIndex.toString(),
+ address: toFieldString(address),
+ pathElements: pathElements.map(toFieldString),
+};
+```
+
+**Step 5: Generate Groth16 proof**
+
+```typescript
+const { proof } = await snarkjs.groth16.fullProve(
+ inputs,
+ "build/merkle_proof_js/merkle_proof.wasm",
+ "build/merkle_proof_final.zkey"
+);
+```
+
+**Step 6: Compress proof for on-chain**
+
+```typescript
+const compressedProof = parseProofToCompressed(proof);
+// Returns: { a: 32 bytes, b: 64 bytes, c: 32 bytes }
+```
+
+
+
+
+
+### Instruction data
+
+
+
+
+```typescript
+const instructionData = {
+ proof: { 0: proofResult.compressedProof },
+ addressTreeInfo: {
+ addressMerkleTreePubkeyIndex: addressMerkleTreeIndex,
+ addressQueuePubkeyIndex: addressMerkleTreeIndex,
+ rootIndex: proofResult.rootIndices[0],
+ },
+ outputStateTreeIndex,
+ dataHash: Array.from(dataHash),
+};
+```
+
+
+
+
+
+```typescript
+const instructionData = {
+ inputRootIndex: merkleProof.rootIndex,
+ zkProof: compressedProof, // { a: [...], b: [...], c: [...] }
+ dataHash: Array.from(dataHash),
+};
+```
+
+The compressed proof format:
+- `a`: 32 bytes (G1 X coordinate + sign bit)
+- `b`: 64 bytes (G2 X coordinates + sign bit)
+- `c`: 32 bytes (G1 X coordinate + sign bit)
+
+
+
+
+
+
+
+
+### Instruction
+
+Build the instruction with Anchor.
+
+```typescript
+const instruction = await program.methods
+ .createAccount(
+ proof,
+ addressTreeInfo,
+ outputStateTreeIndex,
+ systemStart,
+ Array.from(dataHash)
+ )
+ .accounts({ signer: signer.publicKey })
+ .remainingAccounts(remainingAccounts)
+ .instruction();
+```
+
+
+
+
+
+### Send transaction
+
+```typescript
+const tx = new web3.Transaction().add(computeBudgetIx, instruction);
+tx.recentBlockhash = (await rpc.getLatestBlockhash()).blockhash;
+tx.feePayer = signer.publicKey;
+tx.sign(signer);
+
+const sig = await rpc.sendTransaction(tx, [signer]);
+```
+
+
+
+
+## Full code example
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+This is an example circuit to include compressed accounts.
+
+
+
+## Full code example
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+For AI assistance with your ZK App, copy this prompt and add your design ideas:
+
+
+
+## Full code example
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+## Next steps
+
+
\ No newline at end of file
diff --git a/zk/examples.mdx b/zk/examples.mdx
new file mode 100644
index 0000000..cb45dc4
--- /dev/null
+++ b/zk/examples.mdx
@@ -0,0 +1,10 @@
+---
+title: Examples
+description: Example projects for building privacy applications on Solana.
+keywords: ["privacy examples solana", "zk examples solana", "private payments examples", "zk identity solana"]
+---
+
+import ZkExamplesTable from "/snippets/overview-tables/zk-examples-table.mdx";
+
+
+
diff --git a/zk/nullifiers.mdx b/zk/nullifiers.mdx
new file mode 100644
index 0000000..8d837dc
--- /dev/null
+++ b/zk/nullifiers.mdx
@@ -0,0 +1,145 @@
+---
+title: Nullifier Guide
+description: "Nullifiers prevent double-spending of state without revealing which state was spent."
+---
+
+import ZkAppPrompt from "/snippets/ai-prompts/zk-app.mdx";
+import NullifierCircuit from '/snippets/code-snippets/zk/nullifier/circuit.mdx';
+import NullifierProgram from '/snippets/code-snippets/zk/nullifier/program.mdx';
+import NullifierRustClient from '/snippets/code-snippets/zk/nullifier/rust-client.mdx';
+import NullifierTypescriptClient from '/snippets/code-snippets/zk/nullifier/typescript-client.mdx';
+
+---
+
+* On Solana, nullifiers require a data structure that ensures each nullifier is only created once.
+* A straightforward way is to derive a PDA with the nullifier as the seed for the PDA account.
+* Nullifier accounts must remain active, which permanently locks ~0.001 SOL in rent per nullifier PDA.
+* A rent-free alternative is a compressed address. It can be created once and is derived from a seed, similar to a PDA. This way you can insert nullifiers at a fraction of the cost of creating a PDA.
+
+| Storage | Cost per nullifier |
+|---------|-------------------|
+| PDA | ~0.001 SOL |
+| Compressed PDA | ~0.000005 SOL |
+
+
+To create the compressed PDA via a CPI to the Light System Program, you fetch an additional ZK proof from your RPC.
+If you're already generating a ZK proof for your application logic, the marginal cost of the extra proof is low.
+
+
+## Implementation Guide
+
+This is the complete flow of how nullifiers are used in ZK applications.
+
+
+Each tab includes a full code example section at the bottom.
+
+
+
+
+
+
+### Client computes the nullifier
+
+The nullifier combines a context (e.g., `verification_id`) with the user's secret:
+
+```rust
+fn compute_nullifier(
+ verification_id: &[u8; 32],
+ secret: &[u8; 32],
+) -> [u8; 32] {
+ Poseidon::hashv(&[verification_id, secret]).unwrap()
+}
+```
+
+
+
+
+### Derive address from nullifier on-chain
+
+The program derives a deterministic address from the nullifier:
+
+```rust
+let (address, address_seed) = derive_address(
+ &[
+ NULLIFIER_PREFIX, // prefix
+ nullifier.as_slice(), // nullifier hash
+ verification_id.as_slice(), // context
+ ],
+ &address_tree_pubkey,
+ &crate::ID,
+);
+```
+
+
+
+### Create account at that address
+
+Create a compressed account at the derived address:
+
+```rust
+let nullifier_account = LightAccount::::new_init(
+ &crate::ID,
+ Some(address),
+ output_state_tree_index,
+);
+```
+
+The nullifier now prevents double spending:
+1. Same secret + same context = same nullifier
+2. Same nullifier = same derived address
+3. Address already exists = transaction fails
+
+
+
+
+## Full code example
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+For AI assistance with your ZK App, copy this prompt and add your design ideas:
+
+
+
+## Full code example
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+## Next Steps
+
+
\ No newline at end of file
diff --git a/zk/overview.mdx b/zk/overview.mdx
new file mode 100644
index 0000000..db70529
--- /dev/null
+++ b/zk/overview.mdx
@@ -0,0 +1,123 @@
+---
+title: Primitives for ZK on Solana
+sidebarTitle: Overview
+description: Overview of how to build a ZK program on Solana.
+keywords: ["nullifiers on Solana", "zcash on solana", "privacy on solana", "zk on solana", "solana privacy hackathon", "private payments solana", "privacy tooling solana"]
+---
+
+import ZkExamplesTable from "/snippets/overview-tables/zk-examples-table.mdx";
+
+---
+
+Building a ZK Solana program requires:
+1. Nullifiers to prevent double spending
+2. Proof verification
+3. A Merkle tree to store state
+4. An indexer to serve Merkle proofs
+5. Encrypted state
+
+## Nullifiers on Solana
+
+A nullifier is a deterministically derived hash to ensure an action can only be performed once without linking it back to the action.
+Attempting to consume state twice requires the same nullifier, so the transaction fails.
+For example, Zcash uses nullifiers to prevent double spending.
+
+* On Solana, nullifiers require a data structure that ensures a nullifier is only created once.
+* A straightforward way is to derive a PDA with the nullifier as the seed for the PDA account.
+* Nullifier accounts must remain active and therefore lock ~0.001 SOL in rent per nullifier PDA permanently.
+* Compressed addresses are rent-free and provide similar functionality and derivation while not requiring a custom circuit.
+
+| Storage | Cost per nullifier |
+|---------|-------------------|
+| PDA | ~0.001 SOL |
+| Compressed PDA | ~0.000015 SOL |
+
+
+[See full example with tests on Github](https://github.com/Lightprotocol/program-examples/tree/main/zk/nullifier).
+
+
+```rust
+// add to your program
+use anchor_lang::prelude::*;
+use nullifier_creation::{create_nullifiers, NullifierInstructionData};
+
+declare_id!("Bw8aty8LJY5Kg2b6djghjWGwt6cBc1tVQUoreUehvVq4");
+
+#[program]
+pub mod zk_nullifier {
+ use super::*;
+
+ pub fn create_nullifier<'info>(
+ ctx: Context<'_, '_, '_, 'info, CreateNullifierAccounts<'info>>,
+ data: NullifierInstructionData,
+ nullifiers: Vec<[u8; 32]>,
+ ) -> Result<()> {
+
+ create_nullifiers(
+ &nullifiers,
+ data,
+ ctx.accounts.signer.as_ref(),
+ ctx.remaining_accounts,
+ )
+ }
+}
+
+#[derive(Accounts)]
+pub struct CreateNullifierAccounts<'info> {
+ #[account(mut)]
+ pub signer: Signer<'info>,
+}
+```
+
+## Groth16 Proof Verification on Solana
+
+Groth16's small proof size and fast verification (~200k compute units) make it the practical choice for Solana.
+
+
+Find more information on [docs.rs](https://docs.rs/groth16-solana) and [Github](https://github.com/Lightprotocol/groth16-solana).
+
+
+```rust
+let mut public_inputs_vec = Vec::new();
+for input in PUBLIC_INPUTS.chunks(32) {
+ public_inputs_vec.push(input);
+}
+
+let proof_a: G1 =
+    <G1 as FromBytes>::read(&*[&change_endianness(&PROOF[0..64])[..], &[0u8][..]].concat())
+        .unwrap();
+let mut proof_a_neg = [0u8; 65];
+<G1 as ToBytes>::write(&proof_a.neg(), &mut proof_a_neg[..]).unwrap();
+
+let proof_a = change_endianness(&proof_a_neg[..64]).try_into().unwrap();
+let proof_b = PROOF[64..192].try_into().unwrap();
+let proof_c = PROOF[192..256].try_into().unwrap();
+
+let mut verifier = Groth16Verifier::new(
+ &proof_a,
+ &proof_b,
+ &proof_c,
+ public_inputs_vec.as_slice(),
+ &VERIFYING_KEY,
+)
+.unwrap();
+verifier.verify().unwrap();
+```
+
+## Merkleized State with Indexer Support
+
+ZK applications on Solana can use existing state Merkle trees to store state in rent-free accounts.
+* This way you don't need to maintain your own Merkle tree and indexer.
+* RPCs that support ZK Compression (Helius, Triton) index state changes.
+
+| Creation | Regular | Compressed |
+| :------------- | :--------------------- | :---------------------- |
+| 100-byte PDA | ~0.002 SOL | ~0.000015 SOL |
+
+
+Your circuit must include compressed accounts. Find [guides to compressed accounts in the documentation](/compressed-pdas/overview) and the [full example with zk implementation here](https://github.com/Lightprotocol/program-examples/blob/99d260f9f356743b8fe3501c684f7926930d6079/zk-id/circuits/compressed_account.circom).
+
+
+## Get Started & Examples
+
+
\ No newline at end of file