| repo_id (stringclasses, 563 values) | file_path (stringlengths, 40–166) | content (stringlengths, 1–2.94M) | __index_level_0__ (int64, 0–0) |
|---|---|---|---|
solana_public_repos/Lightprotocol/light-protocol/examples/browser
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs/.eslintrc.json
|
{
"extends": "next/core-web-vitals"
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs/public/vercel.svg
|
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 283 64"><path fill="black" d="M141 16c-11 0-19 7-19 18s9 18 20 18c7 0 13-3 16-7l-7-5c-2 3-6 4-9 4-5 0-9-3-10-7h28v-3c0-11-8-18-19-18zm-9 15c1-4 4-7 9-7s8 3 9 7h-18zm117-15c-11 0-19 7-19 18s9 18 20 18c6 0 12-3 16-7l-8-5c-2 3-5 4-8 4-5 0-9-3-11-7h28l1-3c0-11-8-18-19-18zm-10 15c2-4 5-7 10-7s8 3 9 7h-19zm-39 3c0 6 4 10 10 10 4 0 7-2 9-5l8 5c-3 5-9 8-17 8-11 0-19-7-19-18s8-18 19-18c8 0 14 3 17 8l-8 5c-2-3-5-5-9-5-6 0-10 4-10 10zm83-29v46h-9V5h9zM37 0l37 64H0L37 0zm92 5-27 48L74 5h10l18 30 17-30h10zm59 12v10l-3-1c-6 0-10 4-10 10v15h-9V17h9v9c0-5 6-9 13-9z"/></svg>
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs/public/next.svg
|
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs/src/app/page.module.css
|
.main {
display: flex;
flex-direction: column;
justify-content: space-between;
align-items: center;
padding: 6rem;
min-height: 100vh;
}
.description {
display: inherit;
justify-content: inherit;
align-items: inherit;
font-size: 0.85rem;
max-width: var(--max-width);
width: 100%;
z-index: 2;
font-family: var(--font-mono);
}
.description a {
display: flex;
justify-content: center;
align-items: center;
gap: 0.5rem;
}
.description p {
position: relative;
margin: 0;
padding: 1rem;
background-color: rgba(var(--callout-rgb), 0.5);
border: 1px solid rgba(var(--callout-border-rgb), 0.3);
border-radius: var(--border-radius);
}
.code {
font-weight: 700;
font-family: var(--font-mono);
}
.grid {
display: grid;
grid-template-columns: repeat(4, minmax(25%, auto));
max-width: 100%;
width: var(--max-width);
}
.card {
padding: 1rem 1.2rem;
border-radius: var(--border-radius);
background: rgba(var(--card-rgb), 0);
border: 1px solid rgba(var(--card-border-rgb), 0);
transition: background 200ms, border 200ms;
}
.card span {
display: inline-block;
transition: transform 200ms;
}
.card h2 {
font-weight: 600;
margin-bottom: 0.7rem;
}
.card p {
margin: 0;
opacity: 0.6;
font-size: 0.9rem;
line-height: 1.5;
max-width: 30ch;
text-wrap: balance;
}
.center {
display: flex;
justify-content: center;
align-items: center;
position: relative;
padding: 4rem 0;
}
.center::before {
background: var(--secondary-glow);
border-radius: 50%;
width: 480px;
height: 360px;
margin-left: -400px;
}
.center::after {
background: var(--primary-glow);
width: 240px;
height: 180px;
z-index: -1;
}
.center::before,
.center::after {
content: "";
left: 50%;
position: absolute;
filter: blur(45px);
transform: translateZ(0);
}
.logo {
position: relative;
}
/* Enable hover only on non-touch devices */
@media (hover: hover) and (pointer: fine) {
.card:hover {
background: rgba(var(--card-rgb), 0.1);
border: 1px solid rgba(var(--card-border-rgb), 0.15);
}
.card:hover span {
transform: translateX(4px);
}
}
@media (prefers-reduced-motion) {
.card:hover span {
transform: none;
}
}
/* Mobile */
@media (max-width: 700px) {
.content {
padding: 4rem;
}
.grid {
grid-template-columns: 1fr;
margin-bottom: 120px;
max-width: 320px;
text-align: center;
}
.card {
padding: 1rem 2.5rem;
}
.card h2 {
margin-bottom: 0.5rem;
}
.center {
padding: 8rem 0 6rem;
}
.center::before {
transform: none;
height: 300px;
}
.description {
font-size: 0.8rem;
}
.description a {
padding: 1rem;
}
.description p,
.description div {
display: flex;
justify-content: center;
position: fixed;
width: 100%;
}
.description p {
align-items: center;
inset: 0 0 auto;
padding: 2rem 1rem 1.4rem;
border-radius: 0;
border: none;
border-bottom: 1px solid rgba(var(--callout-border-rgb), 0.25);
background: linear-gradient(
to bottom,
rgba(var(--background-start-rgb), 1),
rgba(var(--callout-rgb), 0.5)
);
background-clip: padding-box;
backdrop-filter: blur(24px);
}
.description div {
align-items: flex-end;
pointer-events: none;
inset: auto 0 0;
padding: 2rem;
height: 200px;
background: linear-gradient(
to bottom,
transparent 0%,
rgb(var(--background-end-rgb)) 40%
);
z-index: 1;
}
}
/* Tablet and Smaller Desktop */
@media (min-width: 701px) and (max-width: 1120px) {
.grid {
grid-template-columns: repeat(2, 50%);
}
}
@media (prefers-color-scheme: dark) {
.vercelLogo {
filter: invert(1);
}
.logo {
filter: invert(1) drop-shadow(0 0 0.3rem #ffffff70);
}
}
@keyframes rotate {
from {
transform: rotate(360deg);
}
to {
transform: rotate(0deg);
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs/src/app/layout.tsx
|
import type { Metadata } from 'next';
import { Inter } from 'next/font/google';
import './globals.css';
const inter = Inter({ subsets: ['latin'] });
export const metadata: Metadata = {
title: 'Create Next App',
description: 'Generated by create next app',
};
export default function RootLayout({
children,
}: Readonly<{
children: React.ReactNode;
}>) {
return (
<html lang="en">
<body className={inter.className}>{children}</body>
</html>
);
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs/src/app/page.tsx
|
'use client';
import React, { FC, useCallback, useMemo } from 'react';
import {
ComputeBudgetProgram,
Keypair,
TransactionMessage,
VersionedTransaction,
} from '@solana/web3.js';
import {
ConnectionProvider,
WalletProvider,
useWallet,
} from '@solana/wallet-adapter-react';
import { WalletNotConnectedError } from '@solana/wallet-adapter-base';
import { UnsafeBurnerWalletAdapter } from '@solana/wallet-adapter-unsafe-burner';
import {
WalletModalProvider,
WalletDisconnectButton,
WalletMultiButton,
} from '@solana/wallet-adapter-react-ui';
import {
LightSystemProgram,
bn,
buildTx,
confirmTx,
defaultTestStateTreeAccounts,
selectMinCompressedSolAccountsForTransfer,
createRpc,
} from '@lightprotocol/stateless.js';
// Default styles that can be overridden by your app
require('@solana/wallet-adapter-react-ui/styles.css');
const SendButton: FC = () => {
const { publicKey, sendTransaction } = useWallet();
const onClick = useCallback(async () => {
const connection = await createRpc();
if (!publicKey) throw new WalletNotConnectedError();
/// airdrop
await confirmTx(
connection,
await connection.requestAirdrop(publicKey, 1e9),
);
/// compress to self
const compressInstruction = await LightSystemProgram.compress({
payer: publicKey,
toAddress: publicKey,
lamports: 1e8,
outputStateTree: defaultTestStateTreeAccounts().merkleTree,
});
const compressInstructions = [
ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }),
compressInstruction,
];
const {
context: { slot: minContextSlot },
value: blockhashCtx,
} = await connection.getLatestBlockhashAndContext();
const tx = buildTx(
compressInstructions,
publicKey,
blockhashCtx.blockhash,
);
const signature = await sendTransaction(tx, connection, {
minContextSlot,
});
await connection.confirmTransaction({
blockhash: blockhashCtx.blockhash,
lastValidBlockHeight: blockhashCtx.lastValidBlockHeight,
signature,
});
console.log(
`Compressed ${1e8} lamports! txId: https://explorer.solana.com/tx/${signature}?cluster=custom`,
);
/// Send compressed SOL to a random address
const recipient = Keypair.generate().publicKey;
/// 1. We need to fetch our sol balance
const accounts =
await connection.getCompressedAccountsByOwner(publicKey);
console.log('accounts', accounts);
const [selectedAccounts, _] = selectMinCompressedSolAccountsForTransfer(
accounts.items,
1e7,
);
console.log('selectedAccounts', selectedAccounts);
/// 2. Retrieve validity proof for our selected balance
const { compressedProof, rootIndices } =
await connection.getValidityProof(
selectedAccounts.map(account => bn(account.hash)),
);
/// 3. Create and send compressed transfer
const sendInstruction = await LightSystemProgram.transfer({
payer: publicKey,
toAddress: recipient,
lamports: 1e7,
inputCompressedAccounts: selectedAccounts,
outputStateTrees: [defaultTestStateTreeAccounts().merkleTree],
recentValidityProof: compressedProof,
recentInputStateRootIndices: rootIndices,
});
const sendInstructions = [
ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }),
sendInstruction,
];
const {
context: { slot: minContextSlotSend },
value: {
blockhash: blockhashSend,
lastValidBlockHeight: lastValidBlockHeightSend,
},
} = await connection.getLatestBlockhashAndContext();
const messageV0Send = new TransactionMessage({
payerKey: publicKey,
recentBlockhash: blockhashSend,
instructions: sendInstructions,
}).compileToV0Message();
const transactionSend = new VersionedTransaction(messageV0Send);
const signatureSend = await sendTransaction(
transactionSend,
connection,
{
minContextSlot: minContextSlotSend,
},
);
await connection.confirmTransaction({
blockhash: blockhashSend,
lastValidBlockHeight: lastValidBlockHeightSend,
signature: signatureSend,
});
console.log(
`Sent ${1e7} lamports to ${recipient.toBase58()} ! txId: https://explorer.solana.com/tx/${signatureSend}?cluster=custom`,
);
}, [publicKey, sendTransaction]);
return (
<button
style={{
fontSize: '1rem',
padding: '1rem',
backgroundColor: '#0066ff',
cursor: 'pointer',
}}
onClick={onClick}
disabled={!publicKey}
>
Get airdrop, compress and send SOL to a random address!
</button>
);
};
export default function Home() {
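// Local-development setup: the app targets a validator at 127.0.0.1:8899 and
// signs with an in-memory burner wallet, so it is not meant for real funds.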
const endpoint = useMemo(() => 'http://127.0.0.1:8899', []);
const wallets = useMemo(() => [new UnsafeBurnerWalletAdapter()], []);
return (
<ConnectionProvider endpoint={endpoint}>
<WalletProvider wallets={wallets} autoConnect>
<WalletModalProvider>
<WalletMultiButton />
<WalletDisconnectButton />
<div>
<label style={{ fontSize: '1.5rem' }}>
Welcome to this very simple example using
Compression in a browser :)
</label>
</div>
<div>
<label>Check the browser console for tx signatures!</label>
</div>
<SendButton />
</WalletModalProvider>
</WalletProvider>
</ConnectionProvider>
);
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/browser/nextjs/src/app/globals.css
|
:root {
--max-width: 1100px;
--border-radius: 12px;
--font-mono: ui-monospace, Menlo, Monaco, "Cascadia Mono", "Segoe UI Mono",
"Roboto Mono", "Oxygen Mono", "Ubuntu Monospace", "Source Code Pro",
"Fira Mono", "Droid Sans Mono", "Courier New", monospace;
--foreground-rgb: 0, 0, 0;
--background-start-rgb: 214, 219, 220;
--background-end-rgb: 255, 255, 255;
--primary-glow: conic-gradient(
from 180deg at 50% 50%,
#16abff33 0deg,
#0885ff33 55deg,
#54d6ff33 120deg,
#0071ff33 160deg,
transparent 360deg
);
--secondary-glow: radial-gradient(
rgba(255, 255, 255, 1),
rgba(255, 255, 255, 0)
);
--tile-start-rgb: 239, 245, 249;
--tile-end-rgb: 228, 232, 233;
--tile-border: conic-gradient(
#00000080,
#00000040,
#00000030,
#00000020,
#00000010,
#00000010,
#00000080
);
--callout-rgb: 238, 240, 241;
--callout-border-rgb: 172, 175, 176;
--card-rgb: 180, 185, 188;
--card-border-rgb: 131, 134, 135;
}
@media (prefers-color-scheme: dark) {
:root {
--foreground-rgb: 255, 255, 255;
--background-start-rgb: 0, 0, 0;
--background-end-rgb: 0, 0, 0;
--primary-glow: radial-gradient(rgba(1, 65, 255, 0.4), rgba(1, 65, 255, 0));
--secondary-glow: linear-gradient(
to bottom right,
rgba(1, 65, 255, 0),
rgba(1, 65, 255, 0),
rgba(1, 65, 255, 0.3)
);
--tile-start-rgb: 2, 13, 46;
--tile-end-rgb: 2, 5, 19;
--tile-border: conic-gradient(
#ffffff80,
#ffffff40,
#ffffff30,
#ffffff20,
#ffffff10,
#ffffff10,
#ffffff80
);
--callout-rgb: 20, 20, 20;
--callout-border-rgb: 108, 108, 108;
--card-rgb: 100, 100, 100;
--card-border-rgb: 200, 200, 200;
}
}
* {
box-sizing: border-box;
padding: 0;
margin: 0;
}
html,
body {
max-width: 100vw;
overflow-x: hidden;
}
body {
color: rgb(var(--foreground-rgb));
background: linear-gradient(
to bottom,
transparent,
rgb(var(--background-end-rgb))
)
rgb(var(--background-start-rgb));
}
a {
color: inherit;
text-decoration: none;
}
@media (prefers-color-scheme: dark) {
html {
color-scheme: dark;
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/.prettierignore
|
.anchor
.DS_Store
target
node_modules
dist
build
test-ledger
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/Anchor.toml
|
[toolchain]
[features]
seeds = false
skip-lint = false
[programs.localnet]
token_escrow = "GRLu2hKaAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPqX"
[programs.testnet]
token_escrow = "GRLu2hKaAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPqX"
[registry]
url = "https://api.apr.dev"
[provider]
cluster = "testnet"
wallet = "$HOME/.config/solana/id.json"
[scripts]
test = "yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts"
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/package.json
|
{
"scripts": {
"lint:fix": "prettier \"*/**/*{.js,.ts}\" -w",
"lint": "prettier \"*/**/*{.js,.ts}\" --check",
"test": "cargo test-sbf -p token-escrow -- --test-threads=1"
},
"dependencies": {
"@coral-xyz/anchor": "0.29.0"
},
"devDependencies": {
"chai": "^5.1.2",
"mocha": "^10.7.3",
"ts-mocha": "^10.0.0",
"@types/bn.js": "^5.1.5",
"@types/chai": "^5.0.0",
"@types/mocha": "^10.0.7",
"typescript": "^5.5.4",
"prettier": "^3.4.2"
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/tsconfig.json
|
{
"compilerOptions": {
"types": ["mocha", "chai"],
"typeRoots": ["./node_modules/@types"],
"lib": ["es2015"],
"module": "commonjs",
"target": "es6",
"esModuleInterop": true
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/migrations/deploy.ts
|
// Migrations are an early feature. Currently, they're nothing more than this
// single deploy script that's invoked from the CLI, injecting a provider
// configured from the workspace's Anchor.toml.
const anchor = require("@coral-xyz/anchor");
module.exports = async function (provider) {
// Configure client to use the provider.
anchor.setProvider(provider);
// Add your deploy script here.
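// Illustrative sketch only (not part of the template): the injected provider
// exposes a connection and wallet, e.g. to log the deployer's balance:
//
//   const balance = await provider.connection.getBalance(provider.wallet.publicKey);
//   console.log("Deployer balance (lamports):", balance);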
};
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/tests/token-escrow.ts
|
import * as anchor from "@coral-xyz/anchor";
import { Program } from "@coral-xyz/anchor";
import { TokenEscrow } from "../target/types/token_escrow";
describe("token-escrow", () => {
// Configure the client to use the local cluster.
anchor.setProvider(anchor.AnchorProvider.env());
const program = anchor.workspace.TokenEscrow as Program<TokenEscrow>;
it("Is initialized!", async () => {
// Add your test here.
const tx = await program.methods.initialize().rpc();
console.log("Your transaction signature", tx);
});
});
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/Cargo.toml
|
[package]
name = "token-escrow"
version = "0.9.0"
description = "Solana escrow implementation using account compression"
repository = "https://github.com/Lightprotocol/light-protocol"
license = "Apache-2.0"
edition = "2021"
[lib]
crate-type = ["cdylib", "lib"]
name = "token_escrow"
[features]
no-entrypoint = []
no-log-ix-name = []
cpi = ["no-entrypoint"]
custom-heap = []
default = ["custom-heap", "idl-build"]
test-sbf = []
idl-build = ["anchor-lang/idl-build", "anchor-spl/idl-build"]
[dependencies]
anchor-lang = { workspace = true, features = ["init-if-needed"] }
light-compressed-token = { workspace = true }
light-system-program = { workspace = true }
account-compression = { workspace = true }
light-hasher = { path = "../../../../merkle-tree/hasher", version = "1.1.0" }
light-verifier = { path = "../../../../circuit-lib/verifier", version = "1.1.0" }
light-sdk = { workspace = true, features = ["legacy"] }
[target.'cfg(not(target_os = "solana"))'.dependencies]
solana-sdk = { workspace = true }
[dev-dependencies]
solana-program-test = { workspace = true }
light-test-utils = { version = "1.2.0", path = "../../../../test-utils", features = ["devenv"] }
light-program-test = { workspace = true, features = ["devenv"] }
reqwest = "0.12"
tokio = { workspace = true }
light-prover-client = { path = "../../../../circuit-lib/light-prover-client", version = "1.2.0" }
num-bigint = "0.4.6"
num-traits = "0.2.19"
spl-token = { workspace = true }
anchor-spl = { workspace = true }
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/Xargo.toml
|
[target.bpfel-unknown-unknown.dependencies.std]
features = []
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/tests/test.rs
|
#![cfg(feature = "test-sbf")]
// TODO: extend this example with a swap function
// TODO: implement a version with delegate and approve
// 1. escrow tokens with pda
// create test env
// create mint and mint tokens
// escrow compressed tokens - with normal pda
// - transfer tokens to compressed token account owned by pda
// - create escrow pda and just prove that utxo exists -> read utxo from compressed token account
// release compressed tokens
use light_hasher::Poseidon;
use light_program_test::test_env::{setup_test_programs_with_accounts, EnvAccounts};
use light_prover_client::gnark::helpers::{ProofType, ProverConfig};
use light_system_program::sdk::{compressed_account::MerkleContext, event::PublicTransactionEvent};
use light_test_utils::indexer::TestIndexer;
use light_test_utils::spl::{create_mint_helper, mint_tokens_helper};
use light_test_utils::{
airdrop_lamports, assert_rpc_error, FeeConfig, Indexer, RpcConnection, RpcError,
TransactionParams,
};
use light_verifier::VerifierError;
use solana_sdk::instruction::Instruction;
use solana_sdk::signature::Keypair;
use solana_sdk::{pubkey::Pubkey, signer::Signer, transaction::Transaction};
use token_escrow::escrow_with_compressed_pda::sdk::get_token_owner_pda;
use token_escrow::escrow_with_pda::sdk::{
create_escrow_instruction, create_withdrawal_escrow_instruction, get_timelock_pda,
CreateEscrowInstructionInputs,
};
use token_escrow::{EscrowError, EscrowTimeLock};
/// Tests:
/// 1. create test env
/// 2. create mint and mint tokens
/// 3. escrow compressed tokens
/// 4. withdraw compressed tokens
/// 5. mint tokens to second payer
/// 6. escrow compressed tokens with lockup time
/// 7. try to withdraw before lockup time
/// 8. try to withdraw with invalid signer
/// 9. withdraw after lockup time
#[tokio::test]
async fn test_escrow_pda() {
let (mut rpc, env) = setup_test_programs_with_accounts(Some(vec![(
String::from("token_escrow"),
token_escrow::ID,
)]))
.await;
let payer = rpc.get_payer().insecure_clone();
let payer_pubkey = payer.pubkey();
let merkle_tree_pubkey = env.merkle_tree_pubkey;
let test_indexer = TestIndexer::init_from_env(
&payer,
&env,
Some(ProverConfig {
run_mode: None,
circuits: vec![ProofType::Inclusion],
}),
);
let mint = create_mint_helper(&mut rpc, &payer).await;
let mut test_indexer = test_indexer.await;
let amount = 10000u64;
mint_tokens_helper(
&mut rpc,
&mut test_indexer,
&merkle_tree_pubkey,
&payer,
&mint,
vec![amount],
vec![payer.pubkey()],
)
.await;
let escrow_amount = 100u64;
let lockup_time = 0u64;
perform_escrow_with_event(
&mut rpc,
&mut test_indexer,
&env,
&payer,
&escrow_amount,
&lockup_time,
)
.await
.unwrap();
assert_escrow(
&mut rpc,
&test_indexer,
&payer_pubkey,
amount,
escrow_amount,
&lockup_time,
)
.await;
println!("withdrawal _----------------------------------------------------------------");
let withdrawal_amount = 50u64;
perform_withdrawal_with_event(
&mut rpc,
&mut test_indexer,
&env,
&payer,
&withdrawal_amount,
None,
)
.await
.unwrap();
assert_withdrawal(
&test_indexer,
&payer_pubkey,
withdrawal_amount,
escrow_amount,
);
let second_payer = Keypair::new();
let second_payer_pubkey = second_payer.pubkey();
println!("second payer pub key {:?}", second_payer_pubkey);
let second_payer_token_balance = 1_000_000_000;
airdrop_lamports(&mut rpc, &second_payer_pubkey, 1_000_000_000)
.await
.unwrap();
mint_tokens_helper(
&mut rpc,
&mut test_indexer,
&merkle_tree_pubkey,
&payer,
&mint,
vec![second_payer_token_balance],
vec![second_payer_pubkey],
)
.await;
let escrow_amount = 100u64;
let lockup_time = 100u64;
perform_escrow_with_event(
&mut rpc,
&mut test_indexer,
&env,
&second_payer,
&escrow_amount,
&lockup_time,
)
.await
.unwrap();
assert_escrow(
&mut rpc,
&test_indexer,
&second_payer_pubkey,
second_payer_token_balance,
escrow_amount,
&lockup_time,
)
.await;
// try withdrawal before lockup time
let withdrawal_amount = 50u64;
let result = perform_withdrawal_failing(
&mut rpc,
&mut test_indexer,
&env,
&second_payer,
&withdrawal_amount,
None,
)
.await;
assert_rpc_error(result, 0, EscrowError::EscrowLocked.into()).unwrap();
rpc.warp_to_slot(1000).await.unwrap();
// try withdrawal with invalid signer
let result = perform_withdrawal_failing(
&mut rpc,
&mut test_indexer,
&env,
&second_payer,
&withdrawal_amount,
Some(payer_pubkey),
)
.await;
assert_rpc_error(result, 0, VerifierError::ProofVerificationFailed.into()).unwrap();
perform_withdrawal_with_event(
&mut rpc,
&mut test_indexer,
&env,
&second_payer,
&withdrawal_amount,
None,
)
.await
.unwrap();
assert_withdrawal(
&test_indexer,
&second_payer_pubkey,
withdrawal_amount,
escrow_amount,
);
}
pub async fn perform_escrow<R: RpcConnection>(
rpc: &mut R,
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
payer: &Keypair,
escrow_amount: &u64,
lock_up_time: &u64,
) -> Instruction {
let input_compressed_token_account_data = test_indexer
.token_compressed_accounts
.iter()
.find(|x| {
println!("searching token account: {:?}", x.token_data);
println!("escrow amount: {:?}", escrow_amount);
println!("payer pub key: {:?}", payer.pubkey());
x.token_data.owner == payer.pubkey() && x.token_data.amount >= *escrow_amount
})
.expect("no account with enough tokens")
.clone();
let payer_pubkey = payer.pubkey();
let compressed_input_account_with_context = input_compressed_token_account_data
.compressed_account
.clone();
let input_compressed_account_hash = compressed_input_account_with_context
.compressed_account
.hash::<Poseidon>(
&env.merkle_tree_pubkey,
&compressed_input_account_with_context
.merkle_context
.leaf_index,
)
.unwrap();
let rpc_result = test_indexer
.create_proof_for_compressed_accounts(
Some(&[input_compressed_account_hash]),
Some(&[compressed_input_account_with_context
.merkle_context
.merkle_tree_pubkey]),
None,
None,
rpc,
)
.await;
let create_ix_inputs = CreateEscrowInstructionInputs {
input_token_data: &[input_compressed_token_account_data.token_data.clone()],
lock_up_time: *lock_up_time,
signer: &payer_pubkey,
input_merkle_context: &[MerkleContext {
leaf_index: compressed_input_account_with_context
.merkle_context
.leaf_index,
merkle_tree_pubkey: env.merkle_tree_pubkey,
nullifier_queue_pubkey: env.nullifier_queue_pubkey,
queue_index: None,
}],
output_compressed_account_merkle_tree_pubkeys: &[
env.merkle_tree_pubkey,
env.merkle_tree_pubkey,
],
output_compressed_accounts: &Vec::new(),
root_indices: &rpc_result.root_indices,
proof: &Some(rpc_result.proof),
mint: &input_compressed_token_account_data.token_data.mint,
input_compressed_accounts: &[compressed_input_account_with_context.compressed_account],
};
create_escrow_instruction(create_ix_inputs, *escrow_amount)
}
pub async fn perform_escrow_with_event<R: RpcConnection>(
rpc: &mut R,
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
payer: &Keypair,
escrow_amount: &u64,
lock_up_time: &u64,
) -> Result<(), RpcError> {
let instruction =
perform_escrow(rpc, test_indexer, env, payer, escrow_amount, lock_up_time).await;
let rent = rpc
.get_minimum_balance_for_rent_exemption(16)
.await
.unwrap();
let event = rpc
.create_and_send_transaction_with_event::<PublicTransactionEvent>(
&[instruction],
&payer.pubkey(),
&[payer],
Some(TransactionParams {
num_input_compressed_accounts: 1,
num_output_compressed_accounts: 2,
num_new_addresses: 0,
compress: rent as i64,
fee_config: FeeConfig::default(),
}),
)
.await?
.unwrap();
test_indexer.add_compressed_accounts_with_token_data(&event.0);
Ok(())
}
pub async fn perform_escrow_failing<R: RpcConnection>(
rpc: &mut R,
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
payer: &Keypair,
escrow_amount: &u64,
lock_up_time: &u64,
) -> Result<solana_sdk::signature::Signature, RpcError> {
let instruction =
perform_escrow(rpc, test_indexer, env, payer, escrow_amount, lock_up_time).await;
let transaction = Transaction::new_signed_with_payer(
&[instruction],
Some(&payer.pubkey()),
&[&payer],
rpc.get_latest_blockhash().await.unwrap(),
);
rpc.process_transaction(transaction).await
}
pub async fn assert_escrow<R: RpcConnection>(
rpc: &mut R,
test_indexer: &TestIndexer<R>,
payer_pubkey: &Pubkey,
amount: u64,
escrow_amount: u64,
lock_up_time: &u64,
) {
let token_owner_pda = get_token_owner_pda(payer_pubkey).0;
let token_data_escrow = test_indexer
.token_compressed_accounts
.iter()
.find(|x| x.token_data.owner == token_owner_pda)
.unwrap()
.token_data
.clone();
assert_eq!(token_data_escrow.amount, escrow_amount);
assert_eq!(token_data_escrow.owner, token_owner_pda);
let token_data_change_compressed_token_account =
test_indexer.token_compressed_accounts[0].token_data.clone();
assert_eq!(
token_data_change_compressed_token_account.amount,
amount - escrow_amount
);
assert_eq!(
token_data_change_compressed_token_account.owner,
*payer_pubkey
);
let time_lock_pubkey = get_timelock_pda(payer_pubkey);
let timelock_account = rpc
.get_anchor_account::<EscrowTimeLock>(&time_lock_pubkey)
.await
.unwrap()
.unwrap();
let current_slot = rpc.get_slot().await.unwrap();
assert_eq!(timelock_account.slot, *lock_up_time + current_slot);
}
pub async fn perform_withdrawal<R: RpcConnection>(
context: &mut R,
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
payer: &Keypair,
withdrawal_amount: &u64,
invalid_signer: Option<Pubkey>,
) -> Instruction {
let payer_pubkey = payer.pubkey();
let token_owner_pda = get_token_owner_pda(&invalid_signer.unwrap_or(payer_pubkey)).0;
let escrow_token_data_with_context = test_indexer
.token_compressed_accounts
.iter()
.find(|x| {
x.token_data.owner == token_owner_pda && x.token_data.amount >= *withdrawal_amount
})
.expect("no account with enough tokens")
.clone();
let compressed_input_account_with_context =
escrow_token_data_with_context.compressed_account.clone();
let input_compressed_account_hash = compressed_input_account_with_context
.compressed_account
.hash::<Poseidon>(
&env.merkle_tree_pubkey,
&compressed_input_account_with_context
.merkle_context
.leaf_index,
)
.unwrap();
let rpc_result = test_indexer
.create_proof_for_compressed_accounts(
Some(&[input_compressed_account_hash]),
Some(&[compressed_input_account_with_context
.merkle_context
.merkle_tree_pubkey]),
None,
None,
context,
)
.await;
let create_ix_inputs = CreateEscrowInstructionInputs {
input_token_data: &[escrow_token_data_with_context.token_data.clone()],
lock_up_time: 0,
signer: &payer_pubkey,
input_merkle_context: &[MerkleContext {
leaf_index: compressed_input_account_with_context
.merkle_context
.leaf_index,
merkle_tree_pubkey: env.merkle_tree_pubkey,
nullifier_queue_pubkey: env.nullifier_queue_pubkey,
queue_index: None,
}],
output_compressed_account_merkle_tree_pubkeys: &[
env.merkle_tree_pubkey,
env.merkle_tree_pubkey,
],
output_compressed_accounts: &Vec::new(),
root_indices: &rpc_result.root_indices,
proof: &Some(rpc_result.proof),
mint: &escrow_token_data_with_context.token_data.mint,
input_compressed_accounts: &[compressed_input_account_with_context.compressed_account],
};
create_withdrawal_escrow_instruction(create_ix_inputs, *withdrawal_amount)
}
pub async fn perform_withdrawal_with_event<R: RpcConnection>(
rpc: &mut R,
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
payer: &Keypair,
withdrawal_amount: &u64,
invalid_signer: Option<Pubkey>,
) -> Result<(), RpcError> {
let instruction = perform_withdrawal(
rpc,
test_indexer,
env,
payer,
withdrawal_amount,
invalid_signer,
)
.await;
let event = rpc
.create_and_send_transaction_with_event::<PublicTransactionEvent>(
&[instruction],
&payer.pubkey(),
&[payer],
None,
)
.await?
.unwrap();
test_indexer.add_compressed_accounts_with_token_data(&event.0);
Ok(())
}
pub async fn perform_withdrawal_failing<R: RpcConnection>(
rpc: &mut R,
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
payer: &Keypair,
withdrawal_amount: &u64,
invalid_signer: Option<Pubkey>,
) -> Result<solana_sdk::signature::Signature, RpcError> {
let instruction = perform_withdrawal(
rpc,
test_indexer,
env,
payer,
withdrawal_amount,
invalid_signer,
)
.await;
let transaction = Transaction::new_signed_with_payer(
&[instruction],
Some(&payer.pubkey()),
&[&payer],
rpc.get_latest_blockhash().await.unwrap(),
);
rpc.process_transaction(transaction).await
}
pub fn assert_withdrawal<R: RpcConnection>(
test_indexer: &TestIndexer<R>,
payer_pubkey: &Pubkey,
withdrawal_amount: u64,
escrow_amount: u64,
) {
let token_owner_pda = get_token_owner_pda(payer_pubkey).0;
let token_data_withdrawal = test_indexer
.token_compressed_accounts
.iter()
.any(|x| x.token_data.owner == *payer_pubkey && x.token_data.amount == withdrawal_amount);
assert!(
token_data_withdrawal,
"Withdrawal compressed account doesn't exist or has incorrect amount {} expected amount",
withdrawal_amount
);
let token_data_escrow_change = test_indexer.token_compressed_accounts.iter().any(|x| {
x.token_data.owner == token_owner_pda
&& x.token_data.amount == escrow_amount - withdrawal_amount
});
assert!(
token_data_escrow_change,
"Escrow change compressed account doesn't exist or has incorrect amount {} expected amount",
escrow_amount - withdrawal_amount
);
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/tests/test_compressed_pda.rs
|
#![cfg(feature = "test-sbf")]
// 2. escrow tokens with compressed pda
// create test env
// create mint and mint tokens
// escrow compressed tokens - with compressed pda
// release compressed tokens
// TODO: 3. escrow tokens by decompression with compressed pda
// this design pattern can be used to integrate compressed accounts with an AMM
// create test env
// create mint and mint tokens
// decompress compressed tokens into program owned token account - with compressed pda
// release compressed tokens
use anchor_lang::AnchorDeserialize;
use light_hasher::{Hasher, Poseidon};
use light_program_test::test_env::{setup_test_programs_with_accounts, EnvAccounts};
use light_prover_client::gnark::helpers::{ProverConfig, ProverMode};
use light_system_program::sdk::address::derive_address;
use light_system_program::sdk::compressed_account::MerkleContext;
use light_system_program::sdk::event::PublicTransactionEvent;
use light_system_program::NewAddressParams;
use light_test_utils::indexer::TestIndexer;
use light_test_utils::spl::{create_mint_helper, mint_tokens_helper};
use light_test_utils::{FeeConfig, Indexer, RpcConnection, RpcError, TransactionParams};
use solana_sdk::instruction::{Instruction, InstructionError};
use solana_sdk::signature::Keypair;
use solana_sdk::{signer::Signer, transaction::Transaction};
use token_escrow::escrow_with_compressed_pda::sdk::{
create_escrow_instruction, create_withdrawal_instruction, get_token_owner_pda,
CreateCompressedPdaEscrowInstructionInputs, CreateCompressedPdaWithdrawalInstructionInputs,
};
use token_escrow::{EscrowError, EscrowTimeLock};
#[tokio::test]
async fn test_escrow_with_compressed_pda() {
let (mut rpc, env) = setup_test_programs_with_accounts(Some(vec![(
String::from("token_escrow"),
token_escrow::ID,
)]))
.await;
let payer = rpc.get_payer().insecure_clone();
let test_indexer = TestIndexer::init_from_env(
&payer,
&env,
Some(ProverConfig {
run_mode: Some(ProverMode::Rpc),
circuits: vec![],
}),
);
let mint = create_mint_helper(&mut rpc, &payer).await;
let mut test_indexer = test_indexer.await;
let amount = 10000u64;
mint_tokens_helper(
&mut rpc,
&mut test_indexer,
&env.merkle_tree_pubkey,
&payer,
&mint,
vec![amount],
vec![payer.pubkey()],
)
.await;
let seed = [1u8; 32];
let escrow_amount = 100u64;
let lock_up_time = 1000u64;
perform_escrow_with_event(
&mut test_indexer,
&mut rpc,
&env,
&payer,
lock_up_time,
escrow_amount,
seed,
)
.await
.unwrap();
let current_slot = rpc.get_slot().await.unwrap();
let lockup_end = lock_up_time + current_slot;
assert_escrow(
&mut test_indexer,
&env,
&payer,
&escrow_amount,
&amount,
&seed,
&lockup_end,
)
.await;
println!("withdrawal _----------------------------------------------------------------");
let withdrawal_amount = escrow_amount;
let new_lock_up_time = 2000u64;
let result = perform_withdrawal_failing(
&mut rpc,
&mut test_indexer,
&env,
&payer,
lock_up_time,
new_lock_up_time,
withdrawal_amount,
)
.await;
let instruction_error = InstructionError::Custom(EscrowError::EscrowLocked.into());
let transaction_error =
solana_sdk::transaction::TransactionError::InstructionError(0, instruction_error);
let rpc_error = RpcError::TransactionError(transaction_error);
assert!(matches!(result, Err(error) if error.to_string() == rpc_error.to_string()));
rpc.warp_to_slot(lockup_end + 1).await.unwrap();
perform_withdrawal_with_event(
&mut rpc,
&mut test_indexer,
&env,
&payer,
lockup_end,
new_lock_up_time,
withdrawal_amount,
)
.await
.unwrap();
assert_withdrawal(
&mut rpc,
&mut test_indexer,
&env,
&payer,
&withdrawal_amount,
&escrow_amount,
&seed,
new_lock_up_time,
)
.await;
}
pub async fn perform_escrow_failing<R: RpcConnection>(
test_indexer: &mut TestIndexer<R>,
rpc: &mut R,
env: &EnvAccounts,
payer: &Keypair,
lock_up_time: u64,
escrow_amount: u64,
seed: [u8; 32],
) -> Result<solana_sdk::signature::Signature, RpcError> {
let (payer_pubkey, instruction) = create_escrow_ix(
payer,
test_indexer,
env,
seed,
rpc,
lock_up_time,
escrow_amount,
)
.await;
let latest_blockhash = rpc.get_latest_blockhash().await.unwrap();
let transaction = Transaction::new_signed_with_payer(
&[instruction],
Some(&payer_pubkey),
&[&payer],
latest_blockhash,
);
rpc.process_transaction(transaction).await
}
pub async fn perform_escrow_with_event<R: RpcConnection>(
test_indexer: &mut TestIndexer<R>,
rpc: &mut R,
env: &EnvAccounts,
payer: &Keypair,
lock_up_time: u64,
escrow_amount: u64,
seed: [u8; 32],
) -> Result<(), RpcError> {
let (_, instruction) = create_escrow_ix(
payer,
test_indexer,
env,
seed,
rpc,
lock_up_time,
escrow_amount,
)
.await;
let event = rpc
.create_and_send_transaction_with_event::<PublicTransactionEvent>(
&[instruction],
&payer.pubkey(),
&[payer],
Some(TransactionParams {
num_input_compressed_accounts: 1,
num_output_compressed_accounts: 3,
num_new_addresses: 1,
compress: 0,
fee_config: FeeConfig::default(),
}),
)
.await?;
test_indexer.add_compressed_accounts_with_token_data(&event.unwrap().0);
Ok(())
}
async fn create_escrow_ix<R: RpcConnection>(
payer: &Keypair,
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
seed: [u8; 32],
context: &mut R,
lock_up_time: u64,
escrow_amount: u64,
) -> (anchor_lang::prelude::Pubkey, Instruction) {
let payer_pubkey = payer.pubkey();
let input_compressed_token_account_data = test_indexer.token_compressed_accounts[0].clone();
let compressed_input_account_with_context = input_compressed_token_account_data
.compressed_account
.clone();
let input_compressed_account_hash = compressed_input_account_with_context
.compressed_account
.hash::<Poseidon>(
&env.merkle_tree_pubkey,
&compressed_input_account_with_context
.merkle_context
.leaf_index,
)
.unwrap();
let address = derive_address(&env.address_merkle_tree_pubkey, &seed).unwrap();
let rpc_result = test_indexer
.create_proof_for_compressed_accounts(
Some(&[input_compressed_account_hash]),
Some(&[compressed_input_account_with_context
.merkle_context
.merkle_tree_pubkey]),
Some(&[address]),
Some(vec![env.address_merkle_tree_pubkey]),
context,
)
.await;
let new_address_params = NewAddressParams {
seed,
address_merkle_tree_pubkey: env.address_merkle_tree_pubkey,
address_queue_pubkey: env.address_merkle_tree_queue_pubkey,
address_merkle_tree_root_index: rpc_result.address_root_indices[0],
};
let create_ix_inputs = CreateCompressedPdaEscrowInstructionInputs {
input_token_data: &[input_compressed_token_account_data.token_data.clone()],
lock_up_time,
signer: &payer_pubkey,
input_merkle_context: &[MerkleContext {
leaf_index: compressed_input_account_with_context
.merkle_context
.leaf_index,
merkle_tree_pubkey: env.merkle_tree_pubkey,
nullifier_queue_pubkey: env.nullifier_queue_pubkey,
queue_index: None,
}],
output_compressed_account_merkle_tree_pubkeys: &[
env.merkle_tree_pubkey,
env.merkle_tree_pubkey,
],
output_compressed_accounts: &Vec::new(),
root_indices: &rpc_result.root_indices,
proof: &Some(rpc_result.proof),
mint: &input_compressed_token_account_data.token_data.mint,
new_address_params,
cpi_context_account: &env.cpi_context_account_pubkey,
input_compressed_accounts: &[compressed_input_account_with_context.compressed_account],
};
let instruction = create_escrow_instruction(create_ix_inputs.clone(), escrow_amount);
(payer_pubkey, instruction)
}
pub async fn assert_escrow<R: RpcConnection>(
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
payer: &Keypair,
escrow_amount: &u64,
amount: &u64,
seed: &[u8; 32],
lock_up_time: &u64,
) {
let payer_pubkey = payer.pubkey();
let token_owner_pda = get_token_owner_pda(&payer_pubkey).0;
let token_data_escrow = test_indexer
.token_compressed_accounts
.iter()
.find(|x| x.token_data.owner == token_owner_pda)
.unwrap()
.token_data
.clone();
assert_eq!(token_data_escrow.amount, *escrow_amount);
assert_eq!(token_data_escrow.owner, token_owner_pda);
let token_data_change_compressed_token_account_exist =
test_indexer.token_compressed_accounts.iter().any(|x| {
x.token_data.owner == payer.pubkey() && x.token_data.amount == amount - escrow_amount
});
assert!(token_data_change_compressed_token_account_exist);
let compressed_escrow_pda = test_indexer
.compressed_accounts
.iter()
.find(|x| x.compressed_account.owner == token_escrow::ID)
.unwrap()
.clone();
let address = derive_address(&env.address_merkle_tree_pubkey, seed).unwrap();
assert_eq!(
compressed_escrow_pda.compressed_account.address.unwrap(),
address
);
assert_eq!(
compressed_escrow_pda.compressed_account.owner,
token_escrow::ID
);
let compressed_escrow_pda_deserialized = compressed_escrow_pda
.compressed_account
.data
.as_ref()
.unwrap();
let compressed_escrow_pda_data =
EscrowTimeLock::deserialize_reader(&mut &compressed_escrow_pda_deserialized.data[..])
.unwrap();
println!(
"compressed_escrow_pda_data {:?}",
compressed_escrow_pda_data
);
assert_eq!(compressed_escrow_pda_data.slot, *lock_up_time);
assert_eq!(
compressed_escrow_pda_deserialized.discriminator,
1u64.to_le_bytes(),
);
assert_eq!(
compressed_escrow_pda_deserialized.data_hash,
Poseidon::hash(&compressed_escrow_pda_data.slot.to_le_bytes()).unwrap(),
);
}
pub async fn perform_withdrawal_with_event<R: RpcConnection>(
rpc: &mut R,
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
payer: &Keypair,
old_lock_up_time: u64,
new_lock_up_time: u64,
escrow_amount: u64,
) -> Result<(), RpcError> {
let instruction = perform_withdrawal(
rpc,
test_indexer,
env,
payer,
old_lock_up_time,
new_lock_up_time,
escrow_amount,
)
.await;
let event = rpc
.create_and_send_transaction_with_event::<PublicTransactionEvent>(
&[instruction],
&payer.pubkey(),
&[payer],
None,
)
.await?;
test_indexer.add_compressed_accounts_with_token_data(&event.unwrap().0);
Ok(())
}
pub async fn perform_withdrawal_failing<R: RpcConnection>(
rpc: &mut R,
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
payer: &Keypair,
old_lock_up_time: u64,
new_lock_up_time: u64,
escrow_amount: u64,
) -> Result<solana_sdk::signature::Signature, RpcError> {
let instruction = perform_withdrawal(
rpc,
test_indexer,
env,
payer,
old_lock_up_time,
new_lock_up_time,
escrow_amount,
)
.await;
let latest_blockhash = rpc.get_latest_blockhash().await.unwrap();
let transaction = Transaction::new_signed_with_payer(
&[instruction],
Some(&payer.pubkey()),
&[&payer],
latest_blockhash,
);
rpc.process_transaction(transaction).await
}
pub async fn perform_withdrawal<R: RpcConnection>(
rpc: &mut R,
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
payer: &Keypair,
old_lock_up_time: u64,
new_lock_up_time: u64,
escrow_amount: u64,
) -> Instruction {
let payer_pubkey = payer.pubkey();
let compressed_escrow_pda = test_indexer
.compressed_accounts
.iter()
.find(|x| x.compressed_account.owner == token_escrow::ID)
.unwrap()
.clone();
println!("compressed_escrow_pda {:?}", compressed_escrow_pda);
let token_owner_pda = get_token_owner_pda(&payer_pubkey).0;
let token_escrow = test_indexer
.token_compressed_accounts
.iter()
.find(|x| x.token_data.owner == token_owner_pda)
.unwrap()
.clone();
let token_escrow_account = token_escrow.compressed_account.clone();
let token_escrow_account_hash = token_escrow_account
.compressed_account
.hash::<Poseidon>(
&env.merkle_tree_pubkey,
&token_escrow_account.merkle_context.leaf_index,
)
.unwrap();
println!("token_data_escrow {:?}", token_escrow);
println!("token escrow_account {:?}", token_escrow_account);
let compressed_pda_hash = compressed_escrow_pda
.compressed_account
.hash::<Poseidon>(
&env.merkle_tree_pubkey,
&compressed_escrow_pda.merkle_context.leaf_index,
)
.unwrap();
println!("compressed_pda_hash {:?}", compressed_pda_hash);
println!("token_escrow_account_hash {:?}", token_escrow_account_hash);
// compressed pda will go first into the proof because in the program
// the compressed pda program executes the transaction
let rpc_result = test_indexer
.create_proof_for_compressed_accounts(
Some(&[compressed_pda_hash, token_escrow_account_hash]),
Some(&[
compressed_escrow_pda.merkle_context.merkle_tree_pubkey,
token_escrow_account.merkle_context.merkle_tree_pubkey,
]),
None,
None,
rpc,
)
.await;
let create_withdrawal_ix_inputs = CreateCompressedPdaWithdrawalInstructionInputs {
input_token_data: &[token_escrow.token_data.clone()],
signer: &payer_pubkey,
input_token_escrow_merkle_context: MerkleContext {
leaf_index: token_escrow_account.merkle_context.leaf_index,
merkle_tree_pubkey: env.merkle_tree_pubkey,
nullifier_queue_pubkey: env.nullifier_queue_pubkey,
queue_index: None,
},
input_cpda_merkle_context: MerkleContext {
leaf_index: compressed_escrow_pda.merkle_context.leaf_index,
merkle_tree_pubkey: env.merkle_tree_pubkey,
nullifier_queue_pubkey: env.nullifier_queue_pubkey,
queue_index: None,
},
output_compressed_account_merkle_tree_pubkeys: &[
env.merkle_tree_pubkey,
env.merkle_tree_pubkey,
],
output_compressed_accounts: &Vec::new(),
root_indices: &rpc_result.root_indices,
proof: &Some(rpc_result.proof),
mint: &token_escrow.token_data.mint,
cpi_context_account: &env.cpi_context_account_pubkey,
old_lock_up_time,
new_lock_up_time,
address: compressed_escrow_pda.compressed_account.address.unwrap(),
input_compressed_accounts: &[compressed_escrow_pda.compressed_account],
};
create_withdrawal_instruction(create_withdrawal_ix_inputs.clone(), escrow_amount)
}
/// 1. Change escrow compressed account exists
/// 2. Withdrawal token account exists
/// 3. Compressed pda with updated lock-up time exists
#[allow(clippy::too_many_arguments)]
pub async fn assert_withdrawal<R: RpcConnection>(
rpc: &mut R,
test_indexer: &mut TestIndexer<R>,
env: &EnvAccounts,
payer: &Keypair,
withdrawal_amount: &u64,
escrow_amount: &u64,
seed: &[u8; 32],
lock_up_time: u64,
) {
let escrow_change_amount = escrow_amount - withdrawal_amount;
let payer_pubkey = payer.pubkey();
let token_owner_pda = get_token_owner_pda(&payer_pubkey).0;
let token_data_escrow = test_indexer.token_compressed_accounts.iter().any(|x| {
x.token_data.owner == token_owner_pda && x.token_data.amount == escrow_change_amount
});
assert!(
token_data_escrow,
"change escrow token account does not exist or has incorrect amount",
);
let withdrawal_account_exists = test_indexer
.token_compressed_accounts
.iter()
.any(|x| x.token_data.owner == payer.pubkey() && x.token_data.amount == *withdrawal_amount);
assert!(withdrawal_account_exists);
let compressed_escrow_pda = test_indexer
.compressed_accounts
.iter()
.find(|x| x.compressed_account.owner == token_escrow::ID)
.unwrap()
.clone();
let address = derive_address(&env.address_merkle_tree_pubkey, seed).unwrap();
assert_eq!(
compressed_escrow_pda.compressed_account.address.unwrap(),
address
);
assert_eq!(
compressed_escrow_pda.compressed_account.owner,
token_escrow::ID
);
let compressed_escrow_pda_deserialized = compressed_escrow_pda
.compressed_account
.data
.as_ref()
.unwrap();
let compressed_escrow_pda_data =
EscrowTimeLock::deserialize_reader(&mut &compressed_escrow_pda_deserialized.data[..])
.unwrap();
let current_slot = rpc.get_slot().await.unwrap();
assert_eq!(compressed_escrow_pda_data.slot, lock_up_time + current_slot);
assert_eq!(
compressed_escrow_pda_deserialized.discriminator,
1u64.to_le_bytes(),
);
assert_eq!(
compressed_escrow_pda_deserialized.data_hash,
Poseidon::hash(&compressed_escrow_pda_data.slot.to_le_bytes()).unwrap(),
);
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src/lib.rs
|
#![allow(clippy::too_many_arguments)]
use anchor_lang::prelude::*;
use anchor_lang::solana_program::pubkey::Pubkey;
use light_compressed_token::process_transfer::InputTokenDataWithContext;
use light_compressed_token::process_transfer::PackedTokenTransferOutputData;
use light_system_program::invoke::processor::CompressedProof;
pub mod escrow_with_compressed_pda;
pub mod escrow_with_pda;
pub use escrow_with_compressed_pda::escrow::*;
pub use escrow_with_pda::escrow::*;
use light_system_program::sdk::CompressedCpiContext;
use light_system_program::NewAddressParamsPacked;
#[error_code]
pub enum EscrowError {
#[msg("Escrow is locked")]
EscrowLocked,
#[msg("CpiContextAccountIndexNotFound")]
CpiContextAccountIndexNotFound,
}
declare_id!("GRLu2hKaAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPqX");
#[program]
pub mod token_escrow {
use self::{
escrow_with_compressed_pda::withdrawal::process_withdraw_compressed_tokens_with_compressed_pda,
escrow_with_pda::withdrawal::process_withdraw_compressed_escrow_tokens_with_pda,
};
use super::*;
/// Escrows compressed tokens for a certain number of slots.
/// Transfers compressed tokens to a compressed token account owned by the cpi_signer.
/// Tokens are locked for lock_up_time slots.
pub fn escrow_compressed_tokens_with_pda<'info>(
ctx: Context<'_, '_, '_, 'info, EscrowCompressedTokensWithPda<'info>>,
lock_up_time: u64,
escrow_amount: u64,
proof: CompressedProof,
mint: Pubkey,
signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_state_merkle_tree_account_indices: Vec<u8>,
) -> Result<()> {
process_escrow_compressed_tokens_with_pda(
ctx,
lock_up_time,
escrow_amount,
proof,
mint,
signer_is_delegate,
input_token_data_with_context,
output_state_merkle_tree_account_indices,
)
}
/// Allows the owner to withdraw compressed tokens from the escrow account,
/// provided the lockup time has expired.
pub fn withdraw_compressed_escrow_tokens_with_pda<'info>(
ctx: Context<'_, '_, '_, 'info, EscrowCompressedTokensWithPda<'info>>,
bump: u8,
withdrawal_amount: u64,
proof: CompressedProof,
mint: Pubkey,
signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_state_merkle_tree_account_indices: Vec<u8>,
) -> Result<()> {
process_withdraw_compressed_escrow_tokens_with_pda(
ctx,
bump,
withdrawal_amount,
proof,
mint,
signer_is_delegate,
input_token_data_with_context,
output_state_merkle_tree_account_indices,
)
}
/// Escrows compressed tokens for a certain number of slots.
/// Transfers compressed tokens to a compressed token account owned by the cpi_signer.
/// Tokens are locked for lock_up_time slots.
pub fn escrow_compressed_tokens_with_compressed_pda<'info>(
ctx: Context<'_, '_, '_, 'info, EscrowCompressedTokensWithCompressedPda<'info>>,
lock_up_time: u64,
escrow_amount: u64,
proof: CompressedProof,
mint: Pubkey,
signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_state_merkle_tree_account_indices: Vec<u8>,
new_address_params: NewAddressParamsPacked,
cpi_context: CompressedCpiContext,
) -> Result<()> {
process_escrow_compressed_tokens_with_compressed_pda(
ctx,
lock_up_time,
escrow_amount,
proof,
mint,
signer_is_delegate,
input_token_data_with_context,
output_state_merkle_tree_account_indices,
new_address_params,
cpi_context,
)
}
/// Allows the owner to withdraw compressed tokens from the compressed-pda escrow,
/// provided the lock-up time has expired. The escrow's compressed pda is
/// updated with new_lock_up_time.
pub fn withdraw_compressed_tokens_with_compressed_pda<'info>(
ctx: Context<'_, '_, '_, 'info, EscrowCompressedTokensWithCompressedPda<'info>>,
withdrawal_amount: u64,
proof: CompressedProof,
mint: Pubkey,
signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_state_merkle_tree_account_indices: Vec<u8>,
cpi_context: CompressedCpiContext,
input_compressed_pda: PackedInputCompressedPda,
bump: u8,
) -> Result<()> {
process_withdraw_compressed_tokens_with_compressed_pda(
ctx,
withdrawal_amount,
proof,
mint,
signer_is_delegate,
input_token_data_with_context,
output_state_merkle_tree_account_indices,
cpi_context,
input_compressed_pda,
bump,
)
}
}
// TODO: add to light_sdk
/// A helper function that creates a new compressed account with the change output.
/// Input sum - Output sum = Change amount
/// Outputs a compressed account holding the change amount, owned by the owner of the compressed input accounts.
fn create_change_output_compressed_token_account(
input_token_data_with_context: &[InputTokenDataWithContext],
output_compressed_accounts: &[PackedTokenTransferOutputData],
owner: &Pubkey,
merkle_tree_index: u8,
) -> PackedTokenTransferOutputData {
let input_sum = input_token_data_with_context
.iter()
.map(|account| account.amount)
.sum::<u64>();
let output_sum = output_compressed_accounts
.iter()
.map(|account| account.amount)
.sum::<u64>();
let change_amount = input_sum - output_sum;
PackedTokenTransferOutputData {
amount: change_amount,
owner: *owner,
lamports: None,
merkle_tree_index,
tlv: None,
}
}
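// Worked example (illustrative values): with inputs of 60 and 40 tokens and a
// single explicit output of 30 tokens, the change output created above holds
// 60 + 40 - 30 = 70 tokens and remains owned by the input accounts' owner.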
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src/escrow_with_compressed_pda/sdk.rs
|
#![cfg(not(target_os = "solana"))]
use crate::escrow_with_compressed_pda::escrow::PackedInputCompressedPda;
use anchor_lang::{InstructionData, ToAccountMetas};
use light_compressed_token::process_transfer::{
get_cpi_authority_pda,
transfer_sdk::{create_inputs_and_remaining_accounts_checked, to_account_metas},
TokenTransferOutputData,
};
use light_system_program::{
invoke::processor::CompressedProof,
sdk::{
address::{add_and_get_remaining_account_indices, pack_new_address_params},
compressed_account::{pack_merkle_context, CompressedAccount, MerkleContext},
CompressedCpiContext,
},
NewAddressParams,
};
use solana_sdk::{instruction::Instruction, pubkey::Pubkey};
#[derive(Debug, Clone)]
pub struct CreateCompressedPdaEscrowInstructionInputs<'a> {
pub lock_up_time: u64,
pub signer: &'a Pubkey,
pub input_merkle_context: &'a [MerkleContext],
pub output_compressed_account_merkle_tree_pubkeys: &'a [Pubkey],
pub output_compressed_accounts: &'a [TokenTransferOutputData],
pub root_indices: &'a [u16],
pub proof: &'a Option<CompressedProof>,
pub input_token_data: &'a [light_compressed_token::token_data::TokenData],
pub input_compressed_accounts: &'a [CompressedAccount],
pub mint: &'a Pubkey,
pub new_address_params: NewAddressParams,
pub cpi_context_account: &'a Pubkey,
}
pub fn create_escrow_instruction(
input_params: CreateCompressedPdaEscrowInstructionInputs,
escrow_amount: u64,
) -> Instruction {
let token_owner_pda = get_token_owner_pda(input_params.signer);
let (mut remaining_accounts, inputs) = create_inputs_and_remaining_accounts_checked(
input_params.input_token_data,
input_params.input_compressed_accounts,
input_params.input_merkle_context,
None,
input_params.output_compressed_accounts,
input_params.root_indices,
input_params.proof,
*input_params.mint,
input_params.signer,
false,
None,
None,
None,
)
.unwrap();
let merkle_tree_indices = add_and_get_remaining_account_indices(
input_params.output_compressed_account_merkle_tree_pubkeys,
&mut remaining_accounts,
);
let new_address_params =
pack_new_address_params(&[input_params.new_address_params], &mut remaining_accounts);
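// Reuse the cpi context account's index if it is already among the remaining
// accounts; otherwise append it and use the newly assigned index.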
let cpi_context_account_index: u8 = match remaining_accounts
.get(input_params.cpi_context_account)
{
Some(entry) => (*entry).try_into().unwrap(),
None => {
remaining_accounts.insert(*input_params.cpi_context_account, remaining_accounts.len());
(remaining_accounts.len() - 1) as u8
}
};
let instruction_data = crate::instruction::EscrowCompressedTokensWithCompressedPda {
lock_up_time: input_params.lock_up_time,
escrow_amount,
proof: input_params.proof.clone().unwrap(),
mint: *input_params.mint,
signer_is_delegate: false,
input_token_data_with_context: inputs.input_token_data_with_context,
output_state_merkle_tree_account_indices: merkle_tree_indices,
new_address_params: new_address_params[0],
cpi_context: CompressedCpiContext {
set_context: false,
first_set_context: true,
cpi_context_account_index,
},
};
let registered_program_pda = Pubkey::find_program_address(
&[light_system_program::ID.to_bytes().as_slice()],
&account_compression::ID,
)
.0;
let compressed_token_cpi_authority_pda = get_cpi_authority_pda().0;
let account_compression_authority =
light_system_program::utils::get_cpi_authority_pda(&light_system_program::ID);
let cpi_authority_pda = light_sdk::utils::get_cpi_authority_pda(&crate::ID);
let accounts = crate::accounts::EscrowCompressedTokensWithCompressedPda {
signer: *input_params.signer,
noop_program: Pubkey::new_from_array(account_compression::utils::constants::NOOP_PUBKEY),
compressed_token_program: light_compressed_token::ID,
light_system_program: light_system_program::ID,
account_compression_program: account_compression::ID,
registered_program_pda,
compressed_token_cpi_authority_pda,
account_compression_authority,
self_program: crate::ID,
token_owner_pda: token_owner_pda.0,
system_program: solana_sdk::system_program::id(),
cpi_context_account: *input_params.cpi_context_account,
cpi_authority_pda,
};
let remaining_accounts = to_account_metas(remaining_accounts);
Instruction {
program_id: crate::ID,
accounts: [accounts.to_account_metas(Some(true)), remaining_accounts].concat(),
data: instruction_data.data(),
}
}
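// A minimal usage sketch (hypothetical values): the token data, Merkle contexts, root
// indices and the validity proof would normally come from an indexer / prover. All the
// lowercase bindings and `ESCROW_AMOUNT` below are placeholders, not part of this SDK.
//
// let ix = create_escrow_instruction(
//     CreateCompressedPdaEscrowInstructionInputs {
//         lock_up_time: 100,
//         signer: &payer.pubkey(),
//         input_merkle_context: &merkle_context,
//         output_compressed_account_merkle_tree_pubkeys: &output_trees,
//         output_compressed_accounts: &outputs,
//         root_indices: &root_indices,
//         proof: &Some(proof),
//         input_token_data: &token_data,
//         input_compressed_accounts: &accounts,
//         mint: &mint,
//         new_address_params,
//         cpi_context_account: &cpi_context_account,
//     },
//     ESCROW_AMOUNT,
// );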
pub fn get_token_owner_pda(signer: &Pubkey) -> (Pubkey, u8) {
Pubkey::find_program_address(
&[b"escrow".as_ref(), signer.to_bytes().as_ref()],
&crate::id(),
)
}
#[derive(Debug, Clone)]
pub struct CreateCompressedPdaWithdrawalInstructionInputs<'a> {
pub signer: &'a Pubkey,
pub input_token_escrow_merkle_context: MerkleContext,
pub input_cpda_merkle_context: MerkleContext,
pub output_compressed_account_merkle_tree_pubkeys: &'a [Pubkey],
pub output_compressed_accounts: &'a [TokenTransferOutputData],
pub root_indices: &'a [u16],
pub proof: &'a Option<CompressedProof>,
pub input_token_data: &'a [light_compressed_token::token_data::TokenData],
pub input_compressed_accounts: &'a [CompressedAccount],
pub mint: &'a Pubkey,
pub old_lock_up_time: u64,
pub new_lock_up_time: u64,
pub address: [u8; 32],
pub cpi_context_account: &'a Pubkey,
}
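/// Builds the `WithdrawCompressedTokensWithCompressedPda` instruction. The compressed token
/// inputs are owned by the escrow token-owner PDA, and the packed Merkle contexts cover both
/// the compressed timelock PDA and the escrowed token account.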
pub fn create_withdrawal_instruction(
input_params: CreateCompressedPdaWithdrawalInstructionInputs,
withdrawal_amount: u64,
) -> Instruction {
let (token_owner_pda, bump) = get_token_owner_pda(input_params.signer);
let (mut remaining_accounts, inputs) = create_inputs_and_remaining_accounts_checked(
input_params.input_token_data,
input_params.input_compressed_accounts,
&[input_params.input_token_escrow_merkle_context],
None,
input_params.output_compressed_accounts,
input_params.root_indices,
input_params.proof,
*input_params.mint,
&token_owner_pda,
false,
None,
None,
None,
)
.unwrap();
let merkle_tree_indices = add_and_get_remaining_account_indices(
input_params.output_compressed_account_merkle_tree_pubkeys,
&mut remaining_accounts,
);
let merkle_context_packed = pack_merkle_context(
&[
input_params.input_cpda_merkle_context,
input_params.input_token_escrow_merkle_context,
],
&mut remaining_accounts,
);
let cpi_context_account_index: u8 = match remaining_accounts
.get(input_params.cpi_context_account)
{
Some(entry) => (*entry).try_into().unwrap(),
None => {
remaining_accounts.insert(*input_params.cpi_context_account, remaining_accounts.len());
(remaining_accounts.len() - 1) as u8
}
};
let cpi_context = CompressedCpiContext {
set_context: false,
first_set_context: true,
cpi_context_account_index,
};
let input_compressed_pda = PackedInputCompressedPda {
old_lock_up_time: input_params.old_lock_up_time,
new_lock_up_time: input_params.new_lock_up_time,
address: input_params.address,
merkle_context: merkle_context_packed[0],
root_index: input_params.root_indices[0],
};
let instruction_data = crate::instruction::WithdrawCompressedTokensWithCompressedPda {
proof: input_params.proof.clone().unwrap(),
mint: *input_params.mint,
signer_is_delegate: false,
input_token_data_with_context: inputs.input_token_data_with_context,
output_state_merkle_tree_account_indices: merkle_tree_indices,
cpi_context,
input_compressed_pda,
withdrawal_amount,
bump,
};
let registered_program_pda = Pubkey::find_program_address(
&[light_system_program::ID.to_bytes().as_slice()],
&account_compression::ID,
)
.0;
let compressed_token_cpi_authority_pda = get_cpi_authority_pda().0;
let account_compression_authority =
light_system_program::utils::get_cpi_authority_pda(&light_system_program::ID);
let cpi_authority_pda = light_system_program::utils::get_cpi_authority_pda(&crate::ID);
let accounts = crate::accounts::EscrowCompressedTokensWithCompressedPda {
signer: *input_params.signer,
noop_program: Pubkey::new_from_array(account_compression::utils::constants::NOOP_PUBKEY),
compressed_token_program: light_compressed_token::ID,
light_system_program: light_system_program::ID,
account_compression_program: account_compression::ID,
registered_program_pda,
compressed_token_cpi_authority_pda,
account_compression_authority,
self_program: crate::ID,
token_owner_pda,
system_program: solana_sdk::system_program::id(),
cpi_context_account: *input_params.cpi_context_account,
cpi_authority_pda,
};
let remaining_accounts = to_account_metas(remaining_accounts);
Instruction {
program_id: crate::ID,
accounts: [accounts.to_account_metas(Some(true)), remaining_accounts].concat(),
data: instruction_data.data(),
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src/escrow_with_compressed_pda/withdrawal.rs
|
use account_compression::utils::constants::CPI_AUTHORITY_PDA_SEED;
use anchor_lang::prelude::*;
use light_compressed_token::process_transfer::{
CompressedTokenInstructionDataTransfer, InputTokenDataWithContext,
PackedTokenTransferOutputData,
};
use light_hasher::{DataHasher, Poseidon};
use light_sdk::verify::verify;
use light_system_program::{
invoke::processor::CompressedProof,
sdk::{
compressed_account::{
CompressedAccount, CompressedAccountData, PackedCompressedAccountWithMerkleContext,
},
CompressedCpiContext,
},
InstructionDataInvokeCpi, OutputCompressedAccountWithPackedContext,
};
use crate::{
create_change_output_compressed_token_account, EscrowCompressedTokensWithCompressedPda,
EscrowError, EscrowTimeLock, PackedInputCompressedPda,
};
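/// Withdraws escrowed compressed tokens once the lockup slot has passed: pays
/// `withdrawal_amount` to the signer, returns the change to the escrow PDA, and replaces the
/// compressed timelock PDA with a new one via two CPIs (compressed token transfer, then the
/// light system program).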
pub fn process_withdraw_compressed_tokens_with_compressed_pda<'info>(
ctx: Context<'_, '_, '_, 'info, EscrowCompressedTokensWithCompressedPda<'info>>,
withdrawal_amount: u64,
proof: CompressedProof,
mint: Pubkey,
signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_state_merkle_tree_account_indices: Vec<u8>,
cpi_context: CompressedCpiContext,
input_compressed_pda: PackedInputCompressedPda,
bump: u8,
) -> Result<()> {
let current_slot = Clock::get()?.slot;
if current_slot < input_compressed_pda.old_lock_up_time {
return err!(EscrowError::EscrowLocked);
}
let (old_state, new_state) = create_compressed_pda_data_based_on_diff(&input_compressed_pda)?;
let withdrawal_token_data = PackedTokenTransferOutputData {
amount: withdrawal_amount,
owner: ctx.accounts.signer.key(),
lamports: None,
merkle_tree_index: output_state_merkle_tree_account_indices[0],
tlv: None,
};
let escrow_change_token_data = create_change_output_compressed_token_account(
&input_token_data_with_context,
&[withdrawal_token_data.clone()],
&ctx.accounts.token_owner_pda.key(),
output_state_merkle_tree_account_indices[1],
);
let output_compressed_accounts = vec![withdrawal_token_data, escrow_change_token_data];
cpi_compressed_token_withdrawal(
&ctx,
mint,
signer_is_delegate,
input_token_data_with_context,
output_compressed_accounts,
proof.clone(),
bump,
cpi_context,
)?;
cpi_compressed_pda_withdrawal(ctx, proof, old_state, new_state, cpi_context)?;
Ok(())
}
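/// Rebuilds the existing compressed timelock account from the instruction inputs (old state)
/// and constructs its replacement whose lockup expires at `current_slot + new_lock_up_time`
/// (new state).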
fn create_compressed_pda_data_based_on_diff(
input_compressed_pda: &PackedInputCompressedPda,
) -> Result<(
PackedCompressedAccountWithMerkleContext,
OutputCompressedAccountWithPackedContext,
)> {
let current_slot = Clock::get()?.slot;
let old_timelock_compressed_pda = EscrowTimeLock {
slot: input_compressed_pda.old_lock_up_time,
};
let old_compressed_account_data = CompressedAccountData {
discriminator: 1u64.to_le_bytes(),
data: old_timelock_compressed_pda.try_to_vec().unwrap(),
data_hash: old_timelock_compressed_pda
.hash::<Poseidon>()
.map_err(ProgramError::from)?,
};
let old_compressed_account = OutputCompressedAccountWithPackedContext {
compressed_account: CompressedAccount {
owner: crate::ID,
lamports: 0,
address: Some(input_compressed_pda.address),
data: Some(old_compressed_account_data),
},
merkle_tree_index: input_compressed_pda.merkle_context.merkle_tree_pubkey_index,
};
let old_compressed_account_with_context = PackedCompressedAccountWithMerkleContext {
compressed_account: old_compressed_account.compressed_account,
merkle_context: input_compressed_pda.merkle_context,
root_index: input_compressed_pda.root_index,
read_only: false,
};
let new_timelock_compressed_pda = EscrowTimeLock {
slot: current_slot
.checked_add(input_compressed_pda.new_lock_up_time)
.unwrap(),
};
let new_compressed_account_data = CompressedAccountData {
discriminator: 1u64.to_le_bytes(),
data: new_timelock_compressed_pda.try_to_vec().unwrap(),
data_hash: new_timelock_compressed_pda
.hash::<Poseidon>()
.map_err(ProgramError::from)?,
};
let new_state = OutputCompressedAccountWithPackedContext {
compressed_account: CompressedAccount {
owner: crate::ID,
lamports: 0,
address: Some(input_compressed_pda.address),
data: Some(new_compressed_account_data),
},
merkle_tree_index: input_compressed_pda.merkle_context.merkle_tree_pubkey_index,
};
Ok((old_compressed_account_with_context, new_state))
}
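/// Invokes the light system program via `verify` to consume the old timelock account and emit
/// the new one, signed with this program's CPI authority PDA.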
fn cpi_compressed_pda_withdrawal<'info>(
ctx: Context<'_, '_, '_, 'info, EscrowCompressedTokensWithCompressedPda<'info>>,
proof: CompressedProof,
old_state: PackedCompressedAccountWithMerkleContext,
compressed_pda: OutputCompressedAccountWithPackedContext,
mut cpi_context: CompressedCpiContext,
) -> Result<()> {
// Create CPI signer seed
let bump = Pubkey::find_program_address(&[b"cpi_authority"], &crate::ID).1;
let bump = [bump];
let signer_seeds = [CPI_AUTHORITY_PDA_SEED, &bump];
cpi_context.first_set_context = false;
// Create CPI inputs
let inputs_struct = InstructionDataInvokeCpi {
relay_fee: None,
input_compressed_accounts_with_merkle_context: vec![old_state],
output_compressed_accounts: vec![compressed_pda],
proof: Some(proof),
new_address_params: Vec::new(),
compress_or_decompress_lamports: None,
is_compress: false,
cpi_context: Some(cpi_context),
};
verify(&ctx, &inputs_struct, &[&signer_seeds])?;
Ok(())
}
// TODO: test with delegate (is disabled right now)
#[inline(never)]
pub fn cpi_compressed_token_withdrawal<'info>(
ctx: &Context<'_, '_, '_, 'info, EscrowCompressedTokensWithCompressedPda<'info>>,
mint: Pubkey,
_signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_compressed_accounts: Vec<PackedTokenTransferOutputData>,
proof: CompressedProof,
bump: u8,
mut cpi_context: CompressedCpiContext,
) -> Result<()> {
let bump = &[bump];
let signer_bytes = ctx.accounts.signer.key.to_bytes();
let seeds: [&[u8]; 3] = [b"escrow".as_slice(), signer_bytes.as_slice(), bump];
cpi_context.set_context = true;
let inputs_struct = CompressedTokenInstructionDataTransfer {
proof: Some(proof),
mint,
delegated_transfer: None,
input_token_data_with_context,
output_compressed_accounts,
is_compress: false,
compress_or_decompress_amount: None,
cpi_context: Some(cpi_context),
lamports_change_account_merkle_tree_index: None,
};
let mut inputs = Vec::new();
CompressedTokenInstructionDataTransfer::serialize(&inputs_struct, &mut inputs).unwrap();
let cpi_accounts = light_compressed_token::cpi::accounts::TransferInstruction {
fee_payer: ctx.accounts.signer.to_account_info(),
authority: ctx.accounts.token_owner_pda.to_account_info(),
registered_program_pda: ctx.accounts.registered_program_pda.to_account_info(),
noop_program: ctx.accounts.noop_program.to_account_info(),
account_compression_authority: ctx.accounts.account_compression_authority.to_account_info(),
account_compression_program: ctx.accounts.account_compression_program.to_account_info(),
self_program: ctx.accounts.compressed_token_program.to_account_info(),
cpi_authority_pda: ctx
.accounts
.compressed_token_cpi_authority_pda
.to_account_info(),
light_system_program: ctx.accounts.light_system_program.to_account_info(),
token_pool_pda: None,
compress_or_decompress_token_account: None,
token_program: None,
system_program: ctx.accounts.system_program.to_account_info(),
};
let signer_seeds: [&[&[u8]]; 1] = [&seeds[..]];
let mut cpi_ctx = CpiContext::new_with_signer(
ctx.accounts.compressed_token_program.to_account_info(),
cpi_accounts,
&signer_seeds,
);
cpi_ctx.remaining_accounts = ctx.remaining_accounts.to_vec();
light_compressed_token::cpi::transfer(cpi_ctx, inputs)?;
Ok(())
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src/escrow_with_compressed_pda/mod.rs
|
pub mod escrow;
pub mod sdk;
pub mod withdrawal;
pub use escrow::*;
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src/escrow_with_compressed_pda/escrow.rs
|
use crate::{create_change_output_compressed_token_account, program::TokenEscrow, EscrowTimeLock};
use account_compression::utils::constants::CPI_AUTHORITY_PDA_SEED;
use anchor_lang::prelude::*;
use light_compressed_token::{
process_transfer::{
CompressedTokenInstructionDataTransfer, InputTokenDataWithContext,
PackedTokenTransferOutputData,
},
program::LightCompressedToken,
};
use light_hasher::{errors::HasherError, DataHasher, Hasher, Poseidon};
use light_sdk::{
legacy::create_cpi_inputs_for_new_account, light_system_accounts, verify::verify, LightTraits,
};
use light_system_program::{
invoke::processor::CompressedProof,
sdk::{
address::derive_address,
compressed_account::{CompressedAccount, CompressedAccountData, PackedMerkleContext},
CompressedCpiContext,
},
NewAddressParamsPacked, OutputCompressedAccountWithPackedContext,
};
#[light_system_accounts]
#[derive(Accounts, LightTraits)]
pub struct EscrowCompressedTokensWithCompressedPda<'info> {
#[account(mut)]
#[fee_payer]
pub signer: Signer<'info>,
/// CHECK:
#[account(seeds = [b"escrow".as_slice(), signer.key.to_bytes().as_slice()], bump)]
pub token_owner_pda: AccountInfo<'info>,
pub compressed_token_program: Program<'info, LightCompressedToken>,
pub compressed_token_cpi_authority_pda: AccountInfo<'info>,
#[self_program]
pub self_program: Program<'info, TokenEscrow>,
/// CHECK:
#[cpi_context]
#[account(mut)]
pub cpi_context_account: AccountInfo<'info>,
#[authority]
#[account(seeds = [CPI_AUTHORITY_PDA_SEED], bump)]
pub cpi_authority_pda: AccountInfo<'info>,
}
#[derive(Debug, Clone, AnchorSerialize, AnchorDeserialize)]
pub struct PackedInputCompressedPda {
pub old_lock_up_time: u64,
pub new_lock_up_time: u64,
pub address: [u8; 32],
pub merkle_context: PackedMerkleContext,
pub root_index: u16,
}
/// Escrows compressed tokens while creating a compressed timelock PDA:
/// 1. create the compressed PDA data
/// 2. transfer the tokens into escrow
/// 3. execute the complete transaction
pub fn process_escrow_compressed_tokens_with_compressed_pda<'info>(
ctx: Context<'_, '_, '_, 'info, EscrowCompressedTokensWithCompressedPda<'info>>,
lock_up_time: u64,
escrow_amount: u64,
proof: CompressedProof,
mint: Pubkey,
signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_state_merkle_tree_account_indices: Vec<u8>,
new_address_params: NewAddressParamsPacked,
cpi_context: CompressedCpiContext,
) -> Result<()> {
let compressed_pda = create_compressed_pda_data(lock_up_time, &ctx, &new_address_params)?;
let escrow_token_data = PackedTokenTransferOutputData {
amount: escrow_amount,
owner: ctx.accounts.token_owner_pda.key(),
lamports: None,
merkle_tree_index: output_state_merkle_tree_account_indices[0],
tlv: None,
};
let change_token_data = create_change_output_compressed_token_account(
&input_token_data_with_context,
&[escrow_token_data.clone()],
&ctx.accounts.signer.key(),
output_state_merkle_tree_account_indices[1],
);
let output_compressed_accounts = vec![escrow_token_data, change_token_data];
cpi_compressed_token_transfer_pda(
&ctx,
mint,
signer_is_delegate,
input_token_data_with_context,
output_compressed_accounts,
proof.clone(),
cpi_context,
)?;
cpi_compressed_pda_transfer(ctx, proof, new_address_params, compressed_pda, cpi_context)?;
Ok(())
}
fn cpi_compressed_pda_transfer<'info>(
ctx: Context<'_, '_, '_, 'info, EscrowCompressedTokensWithCompressedPda<'info>>,
proof: CompressedProof,
new_address_params: NewAddressParamsPacked,
compressed_pda: OutputCompressedAccountWithPackedContext,
mut cpi_context: CompressedCpiContext,
) -> Result<()> {
let bump = Pubkey::find_program_address(&[b"cpi_authority"], &crate::ID).1;
let bump = [bump];
let signer_seeds = [CPI_AUTHORITY_PDA_SEED, &bump];
cpi_context.first_set_context = false;
// Create inputs struct
let inputs_struct = create_cpi_inputs_for_new_account(
proof,
new_address_params,
compressed_pda,
Some(cpi_context),
);
verify(&ctx, &inputs_struct, &[&signer_seeds])?;
Ok(())
}
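/// Builds the output compressed account that stores the timelock
/// (`current_slot + lock_up_time`) at an address derived from the address Merkle tree and the
/// provided seed.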
fn create_compressed_pda_data(
lock_up_time: u64,
ctx: &Context<'_, '_, '_, '_, EscrowCompressedTokensWithCompressedPda<'_>>,
new_address_params: &NewAddressParamsPacked,
) -> Result<OutputCompressedAccountWithPackedContext> {
let current_slot = Clock::get()?.slot;
let timelock_compressed_pda = EscrowTimeLock {
slot: current_slot.checked_add(lock_up_time).unwrap(),
};
let compressed_account_data = CompressedAccountData {
discriminator: 1u64.to_le_bytes(),
data: timelock_compressed_pda.try_to_vec().unwrap(),
data_hash: timelock_compressed_pda
.hash::<Poseidon>()
.map_err(ProgramError::from)?,
};
let derive_address = derive_address(
&ctx.remaining_accounts[new_address_params.address_merkle_tree_account_index as usize]
.key(),
&new_address_params.seed,
)
.map_err(|_| ProgramError::InvalidArgument)?;
Ok(OutputCompressedAccountWithPackedContext {
compressed_account: CompressedAccount {
owner: crate::ID,
lamports: 0,
address: Some(derive_address),
data: Some(compressed_account_data),
},
merkle_tree_index: 0,
})
}
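// The data hash of the timelock account commits only to the lockup slot.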
impl light_hasher::DataHasher for EscrowTimeLock {
fn hash<H: Hasher>(&self) -> std::result::Result<[u8; 32], HasherError> {
H::hash(&self.slot.to_le_bytes())
}
}
#[inline(never)]
pub fn cpi_compressed_token_transfer_pda<'info>(
ctx: &Context<'_, '_, '_, 'info, EscrowCompressedTokensWithCompressedPda<'info>>,
mint: Pubkey,
_signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_compressed_accounts: Vec<PackedTokenTransferOutputData>,
proof: CompressedProof,
mut cpi_context: CompressedCpiContext,
) -> Result<()> {
cpi_context.set_context = true;
let inputs_struct = CompressedTokenInstructionDataTransfer {
proof: Some(proof),
mint,
delegated_transfer: None,
input_token_data_with_context,
output_compressed_accounts,
is_compress: false,
compress_or_decompress_amount: None,
cpi_context: Some(cpi_context),
lamports_change_account_merkle_tree_index: None,
};
let mut inputs = Vec::new();
CompressedTokenInstructionDataTransfer::serialize(&inputs_struct, &mut inputs).unwrap();
let cpi_accounts = light_compressed_token::cpi::accounts::TransferInstruction {
fee_payer: ctx.accounts.signer.to_account_info(),
authority: ctx.accounts.signer.to_account_info(),
registered_program_pda: ctx.accounts.registered_program_pda.to_account_info(),
noop_program: ctx.accounts.noop_program.to_account_info(),
account_compression_authority: ctx.accounts.account_compression_authority.to_account_info(),
account_compression_program: ctx.accounts.account_compression_program.to_account_info(),
self_program: ctx.accounts.compressed_token_program.to_account_info(),
cpi_authority_pda: ctx
.accounts
.compressed_token_cpi_authority_pda
.to_account_info(),
light_system_program: ctx.accounts.light_system_program.to_account_info(),
token_pool_pda: None,
compress_or_decompress_token_account: None,
token_program: None,
system_program: ctx.accounts.system_program.to_account_info(),
};
let mut cpi_ctx = CpiContext::new(
ctx.accounts.compressed_token_program.to_account_info(),
cpi_accounts,
);
cpi_ctx.remaining_accounts = ctx.remaining_accounts.to_vec();
light_compressed_token::cpi::transfer(cpi_ctx, inputs)?;
Ok(())
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src/escrow_with_pda/sdk.rs
|
#![cfg(not(target_os = "solana"))]
use anchor_lang::{InstructionData, ToAccountMetas};
use light_compressed_token::{
process_transfer::get_cpi_authority_pda,
process_transfer::{
transfer_sdk::{
create_inputs_and_remaining_accounts, create_inputs_and_remaining_accounts_checked,
to_account_metas,
},
TokenTransferOutputData,
},
};
use light_system_program::{
invoke::processor::CompressedProof,
sdk::{
address::add_and_get_remaining_account_indices,
compressed_account::{CompressedAccount, MerkleContext},
},
};
use solana_sdk::{instruction::Instruction, pubkey::Pubkey};
use crate::escrow_with_compressed_pda::sdk::get_token_owner_pda;
#[derive(Debug, Clone, Copy)]
pub struct CreateEscrowInstructionInputs<'a> {
pub lock_up_time: u64,
pub signer: &'a Pubkey,
pub input_merkle_context: &'a [MerkleContext],
pub output_compressed_account_merkle_tree_pubkeys: &'a [Pubkey],
pub output_compressed_accounts: &'a [TokenTransferOutputData],
pub root_indices: &'a [u16],
pub proof: &'a Option<CompressedProof>,
pub input_token_data: &'a [light_compressed_token::token_data::TokenData],
pub input_compressed_accounts: &'a [CompressedAccount],
pub mint: &'a Pubkey,
}
pub fn get_timelock_pda(signer: &Pubkey) -> Pubkey {
Pubkey::find_program_address(&[b"timelock".as_ref(), signer.as_ref()], &crate::id()).0
}
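/// Builds the `EscrowCompressedTokensWithPda` instruction. Unlike the compressed-PDA variant,
/// the lockup slot is stored in a regular on-chain `timelock_pda` account.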
pub fn create_escrow_instruction(
input_params: CreateEscrowInstructionInputs,
escrow_amount: u64,
) -> Instruction {
let token_owner_pda = get_token_owner_pda(input_params.signer);
let timelock_pda = get_timelock_pda(input_params.signer);
// TODO: separate the creation of inputs and remaining accounts
let (mut remaining_accounts, inputs) = create_inputs_and_remaining_accounts_checked(
input_params.input_token_data,
input_params.input_compressed_accounts,
input_params.input_merkle_context,
None,
input_params.output_compressed_accounts,
input_params.root_indices,
input_params.proof,
*input_params.mint,
input_params.signer,
false,
None,
None,
None,
)
.unwrap();
let merkle_tree_indices = add_and_get_remaining_account_indices(
input_params.output_compressed_account_merkle_tree_pubkeys,
&mut remaining_accounts,
);
let instruction_data = crate::instruction::EscrowCompressedTokensWithPda {
lock_up_time: input_params.lock_up_time,
escrow_amount,
proof: input_params.proof.clone().unwrap(),
mint: *input_params.mint,
signer_is_delegate: false,
input_token_data_with_context: inputs.input_token_data_with_context,
output_state_merkle_tree_account_indices: merkle_tree_indices,
};
let registered_program_pda = Pubkey::find_program_address(
&[light_system_program::ID.to_bytes().as_slice()],
&account_compression::ID,
)
.0;
let compressed_token_cpi_authority_pda = get_cpi_authority_pda().0;
let account_compression_authority =
light_system_program::utils::get_cpi_authority_pda(&light_system_program::ID);
let accounts = crate::accounts::EscrowCompressedTokensWithPda {
signer: *input_params.signer,
noop_program: Pubkey::new_from_array(account_compression::utils::constants::NOOP_PUBKEY),
compressed_token_program: light_compressed_token::ID,
light_system_program: light_system_program::ID,
account_compression_program: account_compression::ID,
registered_program_pda,
compressed_token_cpi_authority_pda,
account_compression_authority,
timelock_pda,
system_program: solana_sdk::system_program::ID,
token_owner_pda: token_owner_pda.0,
};
let remaining_accounts = to_account_metas(remaining_accounts);
Instruction {
program_id: crate::ID,
accounts: [accounts.to_account_metas(Some(true)), remaining_accounts].concat(),
data: instruction_data.data(),
}
}
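/// Builds the `WithdrawCompressedEscrowTokensWithPda` instruction. The unchecked input
/// builder is used deliberately so tests can exercise the invalid-signer path (see the
/// comment below).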
pub fn create_withdrawal_escrow_instruction(
input_params: CreateEscrowInstructionInputs,
withdrawal_amount: u64,
) -> Instruction {
let token_owner_pda = get_token_owner_pda(input_params.signer);
let timelock_pda = get_timelock_pda(input_params.signer);
// Token transactions with an invalid signer will just fail with invalid proof verification.
    // Thus, it is recommended to use create_inputs_and_remaining_accounts_checked, which returns a descriptive error in case of a wrong signer.
// We use unchecked here to perform a failing test with an invalid signer.
let (mut remaining_accounts, inputs) = create_inputs_and_remaining_accounts(
input_params.input_token_data,
input_params.input_compressed_accounts,
input_params.input_merkle_context,
None,
input_params.output_compressed_accounts,
input_params.root_indices,
input_params.proof,
*input_params.mint,
false,
None,
None,
None,
);
let merkle_tree_indices = add_and_get_remaining_account_indices(
input_params.output_compressed_account_merkle_tree_pubkeys,
&mut remaining_accounts,
);
let instruction_data = crate::instruction::WithdrawCompressedEscrowTokensWithPda {
bump: token_owner_pda.1,
withdrawal_amount,
proof: input_params.proof.clone().unwrap(),
mint: *input_params.mint,
signer_is_delegate: false,
input_token_data_with_context: inputs.input_token_data_with_context,
output_state_merkle_tree_account_indices: merkle_tree_indices,
};
let registered_program_pda = Pubkey::find_program_address(
&[light_system_program::ID.to_bytes().as_slice()],
&account_compression::ID,
)
.0;
let compressed_token_cpi_authority_pda = get_cpi_authority_pda().0;
let account_compression_authority =
light_system_program::utils::get_cpi_authority_pda(&light_system_program::ID);
let accounts = crate::accounts::EscrowCompressedTokensWithPda {
signer: *input_params.signer,
token_owner_pda: token_owner_pda.0,
noop_program: Pubkey::new_from_array(account_compression::utils::constants::NOOP_PUBKEY),
compressed_token_program: light_compressed_token::ID,
light_system_program: light_system_program::ID,
account_compression_program: account_compression::ID,
registered_program_pda,
compressed_token_cpi_authority_pda,
account_compression_authority,
timelock_pda,
system_program: solana_sdk::system_program::ID,
};
let remaining_accounts = to_account_metas(remaining_accounts);
Instruction {
program_id: crate::ID,
accounts: [accounts.to_account_metas(Some(true)), remaining_accounts].concat(),
data: instruction_data.data(),
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src/escrow_with_pda/withdrawal.rs
|
use anchor_lang::prelude::*;
use light_compressed_token::process_transfer::{
CompressedTokenInstructionDataTransfer, InputTokenDataWithContext,
PackedTokenTransferOutputData,
};
use light_system_program::invoke::processor::CompressedProof;
use crate::{
create_change_output_compressed_token_account, EscrowCompressedTokensWithPda, EscrowError,
};
/// Allows the owner to withdraw compressed tokens from the escrow account,
/// provided the lockup time has expired.
pub fn process_withdraw_compressed_escrow_tokens_with_pda<'info>(
ctx: Context<'_, '_, '_, 'info, EscrowCompressedTokensWithPda<'info>>,
bump: u8,
withdrawal_amount: u64,
proof: CompressedProof,
mint: Pubkey,
signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_state_merkle_tree_account_indices: Vec<u8>,
) -> Result<()> {
let current_slot = Clock::get()?.slot;
if current_slot < ctx.accounts.timelock_pda.slot {
return err!(EscrowError::EscrowLocked);
}
let escrow_token_data = PackedTokenTransferOutputData {
amount: withdrawal_amount,
owner: ctx.accounts.signer.key(),
lamports: None,
merkle_tree_index: output_state_merkle_tree_account_indices[0],
tlv: None,
};
let change_token_data = create_change_output_compressed_token_account(
&input_token_data_with_context,
&[escrow_token_data.clone()],
&ctx.accounts.token_owner_pda.key(),
output_state_merkle_tree_account_indices[1],
);
let output_compressed_accounts = vec![escrow_token_data, change_token_data];
withdrawal_cpi_compressed_token_transfer(
&ctx,
bump,
proof,
mint,
signer_is_delegate,
input_token_data_with_context,
output_compressed_accounts,
)
}
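/// CPIs into the compressed token program to move tokens out of escrow, signed by the
/// program-owned `token_owner_pda` (seeds: "escrow", signer key, bump).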
#[inline(never)]
pub fn withdrawal_cpi_compressed_token_transfer<'info>(
ctx: &Context<'_, '_, '_, 'info, EscrowCompressedTokensWithPda<'info>>,
bump: u8,
proof: CompressedProof,
mint: Pubkey,
_signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_compressed_accounts: Vec<PackedTokenTransferOutputData>,
) -> Result<()> {
let inputs_struct = CompressedTokenInstructionDataTransfer {
proof: Some(proof),
mint,
delegated_transfer: None,
input_token_data_with_context,
output_compressed_accounts,
is_compress: false,
compress_or_decompress_amount: None,
cpi_context: None,
lamports_change_account_merkle_tree_index: None,
};
let mut inputs = Vec::new();
CompressedTokenInstructionDataTransfer::serialize(&inputs_struct, &mut inputs).unwrap();
let bump = &[bump];
let signer_bytes = ctx.accounts.signer.key.to_bytes();
let seeds = [b"escrow".as_slice(), signer_bytes.as_slice(), bump];
let signer_seeds = &[&seeds[..]];
let cpi_accounts = light_compressed_token::cpi::accounts::TransferInstruction {
fee_payer: ctx.accounts.signer.to_account_info(),
authority: ctx.accounts.token_owner_pda.to_account_info(),
registered_program_pda: ctx.accounts.registered_program_pda.to_account_info(),
noop_program: ctx.accounts.noop_program.to_account_info(),
account_compression_authority: ctx.accounts.account_compression_authority.to_account_info(),
account_compression_program: ctx.accounts.account_compression_program.to_account_info(),
self_program: ctx.accounts.compressed_token_program.to_account_info(),
cpi_authority_pda: ctx
.accounts
.compressed_token_cpi_authority_pda
.to_account_info(),
light_system_program: ctx.accounts.light_system_program.to_account_info(),
token_pool_pda: None,
compress_or_decompress_token_account: None,
token_program: None,
system_program: ctx.accounts.system_program.to_account_info(),
};
let mut cpi_ctx = CpiContext::new_with_signer(
ctx.accounts.compressed_token_program.to_account_info(),
cpi_accounts,
signer_seeds,
);
cpi_ctx.remaining_accounts = ctx.remaining_accounts.to_vec();
light_compressed_token::cpi::transfer(cpi_ctx, inputs)?;
Ok(())
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src/escrow_with_pda/mod.rs
|
pub mod escrow;
pub mod sdk;
pub mod withdrawal;
pub use escrow::*;
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src
|
solana_public_repos/Lightprotocol/light-protocol/examples/token-escrow/programs/token-escrow/src/escrow_with_pda/escrow.rs
|
use crate::create_change_output_compressed_token_account;
use anchor_lang::prelude::*;
use light_compressed_token::{
process_transfer::{
CompressedTokenInstructionDataTransfer, InputTokenDataWithContext,
PackedTokenTransferOutputData,
},
program::LightCompressedToken,
};
use light_sdk::{light_system_accounts, LightTraits};
use light_system_program::invoke::processor::CompressedProof;
#[light_system_accounts]
#[derive(Accounts, LightTraits)]
pub struct EscrowCompressedTokensWithPda<'info> {
#[account(mut)]
#[fee_payer]
pub signer: Signer<'info>,
/// CHECK:
#[authority]
#[account(seeds = [b"escrow".as_slice(), signer.key.to_bytes().as_slice()], bump)]
pub token_owner_pda: AccountInfo<'info>,
#[self_program]
pub compressed_token_program: Program<'info, LightCompressedToken>,
/// CHECK:
pub compressed_token_cpi_authority_pda: AccountInfo<'info>,
#[account(init_if_needed, seeds = [b"timelock".as_slice(), signer.key.to_bytes().as_slice()],bump, payer = signer, space = 8 + 8)]
pub timelock_pda: Account<'info, EscrowTimeLock>,
}
#[derive(Debug)]
#[account]
pub struct EscrowTimeLock {
pub slot: u64,
}
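/// Records the unlock slot (`current_slot + lock_up_time`) in the timelock PDA and escrows
/// `escrow_amount` compressed tokens under the program-owned token-owner PDA; the change is
/// returned to the signer.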
pub fn process_escrow_compressed_tokens_with_pda<'info>(
ctx: Context<'_, '_, '_, 'info, EscrowCompressedTokensWithPda<'info>>,
lock_up_time: u64,
escrow_amount: u64,
proof: CompressedProof,
mint: Pubkey,
signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_state_merkle_tree_account_indices: Vec<u8>,
) -> Result<()> {
// set timelock
let current_slot = Clock::get()?.slot;
ctx.accounts.timelock_pda.slot = current_slot.checked_add(lock_up_time).unwrap();
let escrow_token_data = PackedTokenTransferOutputData {
amount: escrow_amount,
owner: ctx.accounts.token_owner_pda.key(),
lamports: None,
merkle_tree_index: output_state_merkle_tree_account_indices[0],
tlv: None,
};
let change_token_data = create_change_output_compressed_token_account(
&input_token_data_with_context,
&[escrow_token_data.clone()],
&ctx.accounts.signer.key(),
output_state_merkle_tree_account_indices[1],
);
let output_compressed_accounts = vec![escrow_token_data, change_token_data];
cpi_compressed_token_transfer(
&ctx,
proof,
mint,
signer_is_delegate,
input_token_data_with_context,
output_compressed_accounts,
)
}
#[inline(never)]
pub fn cpi_compressed_token_transfer<'info>(
ctx: &Context<'_, '_, '_, 'info, EscrowCompressedTokensWithPda<'info>>,
proof: CompressedProof,
mint: Pubkey,
_signer_is_delegate: bool,
input_token_data_with_context: Vec<InputTokenDataWithContext>,
output_compressed_accounts: Vec<PackedTokenTransferOutputData>,
) -> Result<()> {
let inputs_struct = CompressedTokenInstructionDataTransfer {
proof: Some(proof),
mint,
delegated_transfer: None,
input_token_data_with_context,
output_compressed_accounts,
is_compress: false,
compress_or_decompress_amount: None,
cpi_context: None,
lamports_change_account_merkle_tree_index: None,
};
let mut inputs = Vec::new();
CompressedTokenInstructionDataTransfer::serialize(&inputs_struct, &mut inputs).unwrap();
let cpi_accounts = light_compressed_token::cpi::accounts::TransferInstruction {
fee_payer: ctx.accounts.signer.to_account_info(),
authority: ctx.accounts.signer.to_account_info(),
registered_program_pda: ctx.accounts.registered_program_pda.to_account_info(),
noop_program: ctx.accounts.noop_program.to_account_info(),
account_compression_authority: ctx.accounts.account_compression_authority.to_account_info(),
account_compression_program: ctx.accounts.account_compression_program.to_account_info(),
self_program: ctx.accounts.compressed_token_program.to_account_info(),
cpi_authority_pda: ctx
.accounts
.compressed_token_cpi_authority_pda
.to_account_info(),
light_system_program: ctx.accounts.light_system_program.to_account_info(),
token_pool_pda: None,
compress_or_decompress_token_account: None,
token_program: None,
system_program: ctx.accounts.system_program.to_account_info(),
};
let mut cpi_ctx = CpiContext::new(
ctx.accounts.compressed_token_program.to_account_info(),
cpi_accounts,
);
cpi_ctx.remaining_accounts = ctx.remaining_accounts.to_vec();
light_compressed_token::cpi::transfer(cpi_ctx, inputs)?;
Ok(())
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/node
|
solana_public_repos/Lightprotocol/light-protocol/examples/node/esm/package.json
|
{
"name": "stateless.js-node-client",
"type": "module",
"version": "0.1.0",
"description": "Example node client for stateless.js",
"main": "index.js",
"scripts": {
"build": "tsc",
"test-validator": "./../../../cli/test_bin/run test-validator",
"test": "pnpm test-validator && node dist/index.js"
},
"keywords": [],
"author": "",
"license": "Apache-2.0",
"devDependencies": {
"@types/node": "^22.4.1",
"typescript": "^5.5.4"
},
"dependencies": {
"@coral-xyz/anchor": "^0.30.0",
"@solana/web3.js": "^1.95.3",
"@lightprotocol/stateless.js": "workspace:*"
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/node
|
solana_public_repos/Lightprotocol/light-protocol/examples/node/esm/tsconfig.json
|
{
"compilerOptions": {
"moduleResolution": "Node",
"module": "ESNext",
"outDir": "dist",
"sourceMap": true,
"allowSyntheticDefaultImports": true,
"esModuleInterop": true
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/examples/node/esm
|
solana_public_repos/Lightprotocol/light-protocol/examples/node/esm/src/index.ts
|
import { airdropSol, compress, createRpc } from "@lightprotocol/stateless.js";
import { Keypair } from "@solana/web3.js";
const rpc = createRpc();
const keypair = new Keypair();
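// Airdrop SOL to the fresh keypair on the local test validator, then compress 1 SOL
// (1e9 lamports) into a compressed account owned by the same keypair.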
(async () => {
await airdropSol({
connection: rpc,
lamports: 1e11,
recipientPublicKey: keypair.publicKey,
});
const tx = await compress(rpc, keypair, 1e9, keypair.publicKey);
console.log("compress tx", tx);
})();
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/push-compressed-token-idl.sh
|
#!/usr/bin/env bash
cd "$(git rev-parse --show-toplevel)"
PDA_FILE="target/idl/light_system_program.json"
TOKEN_FILE="target/idl/light_compressed_token.json"
DEST_DIR="js/compressed-token/src/idl"
TS_FILE="$DEST_DIR/light_compressed_token.ts" # ts output file path
DEST_DIR_STATELESS="js/stateless.js/src/idls"
TS_FILE_STATELESS="$DEST_DIR_STATELESS/light_compressed_token.ts" # ts output file path
TYPE_NAME="LightCompressedToken" # ts type name
# Check if jq is installed
if ! command -v jq &> /dev/null
then
echo "jq could not be found. Please install jq to run this script."
exit 1
fi
# Extract types
PDA_TYPES=$(jq '.types' "$PDA_FILE")
TOKEN_TYPES=$(jq '.types' "$TOKEN_FILE")
# Merge types and deduplicate
MERGED_TYPES=$(jq -s 'add | unique_by(.name)' <(echo "$PDA_TYPES") <(echo "$TOKEN_TYPES"))
# Generate TS content
MERGED_CONTENT=$(jq --argjson types "$MERGED_TYPES" '.types = $types' "$TOKEN_FILE")
# Generate TypeScript file with JSON object inline and place it in both destinations
{
echo -n "export type ${TYPE_NAME} = "
echo "$MERGED_CONTENT"
echo ";"
echo -n "export const IDL: ${TYPE_NAME} = "
echo "$MERGED_CONTENT"
echo ";"
} | tee "$TS_FILE" > "$TS_FILE_STATELESS"
echo "IDL for $TYPE_NAME generated at $TS_FILE and $TS_FILE_STATELESS"
export COREPACK_ENABLE_STRICT=0
# fmt
if ! command -v pnpm prettier &> /dev/null
then
echo "Prettier could not be found. Please install Prettier to run this script."
exit 1
fi
{
echo "Current directory: $(pwd)"
pnpm prettier --write "$TS_FILE" "$TS_FILE_STATELESS" && \
echo "Prettier formatting applied to $TS_FILE and $TS_FILE_STATELESS"
} || {
echo "Failed to apply Prettier formatting to $TS_FILE and $TS_FILE_STATELESS"
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/getAccountState.sh
|
#!/usr/bin/env bash
# run this to regenerate the following accounts:
# merkle_tree_pubkey
# nullifier_queue_pubkey
# governance_authority_pda
# group_pda
#
# To add more accounts to regenerate, add them to setup_test_programs_with_accounts and to the test script.
cd test-programs/system-test && cargo test-sbf regenerate_accounts -- --ignored --nocapture && cd -
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/devenv.sh
|
#!/usr/bin/env bash
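# Source this script to enter the devenv (e.g. `. ./scripts/devenv.sh`); call `deactivate`
# to restore the previous environment.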
# Command to deactivate the devenv. It restores the previous environment variables.
deactivate () {
PS1="${LIGHT_PROTOCOL_OLD_PS1}"
RUSTUP_HOME="${LIGHT_PROTOCOL_OLD_RUSTUP_HOME}"
CARGO_HOME="${LIGHT_PROTOCOL_OLD_CARGO_HOME}"
NPM_CONFIG_PREFIX="${LIGHT_PROTOCOL_OLD_NPM_CONFIG_PREFIX}"
PATH="${LIGHT_PROTOCOL_OLD_PATH}"
[ -n "${LIGHT_PROTOCOL_OLD_RUST_PATH}" ] && PATH="${LIGHT_PROTOCOL_OLD_RUST_PATH}"
[ -n "${LIGHT_PROTOCOL_OLD_CPATH}" ] && CPATH="${LIGHT_PROTOCOL_OLD_CPATH}"
unset LIGHT_PROTOCOL_DEVENV
unset LIGHT_PROTOCOL_TOPLEVEL
unset GOROOT
unset RUSTUP_HOME
unset CARGO_HOME
unset LIGHT_PROTOCOL_OLD_RUST_PATH
}
# Stop early if already in devenv.
if [ -z "${LIGHT_PROTOCOL_DEVENV:-}" ]; then
LIGHT_PROTOCOL_DEVENV=1
else
return
fi
# The root of the git repository.
LIGHT_PROTOCOL_TOPLEVEL="`git rev-parse --show-toplevel`"
# Shell prompt.
LIGHT_PROTOCOL_OLD_PS1="${PS1:-}"
PS1="[🧢 Light Protocol devenv] ${PS1:-}"
# Ensure that our rustup environment is used.
LIGHT_PROTOCOL_OLD_RUSTUP_HOME="${RUSTUP_HOME:-}"
RUSTUP_HOME="${LIGHT_PROTOCOL_TOPLEVEL}/.local/rustup"
LIGHT_PROTOCOL_OLD_CARGO_HOME="${CARGO_HOME:-}"
CARGO_HOME="${LIGHT_PROTOCOL_TOPLEVEL}/.local/cargo"
# Ensure that our npm prefix is used.
LIGHT_PROTOCOL_OLD_NPM_CONFIG_PREFIX="${NPM_CONFIG_PREFIX:-}"
NPM_CONFIG_PREFIX="${LIGHT_PROTOCOL_TOPLEVEL}/.local/npm-global"
# Always use our binaries first.
LIGHT_PROTOCOL_OLD_PATH="${PATH}"
PATH="${LIGHT_PROTOCOL_TOPLEVEL}/.local/bin:${PATH}"
PATH="${LIGHT_PROTOCOL_TOPLEVEL}/.local/cargo/bin:${PATH}"
PATH="${LIGHT_PROTOCOL_TOPLEVEL}/.local/go/bin:${PATH}"
PATH="${LIGHT_PROTOCOL_TOPLEVEL}/.local/npm-global/bin:${PATH}"
# Remove the original Rust-related PATH entries
PATH=$(echo "$PATH" | tr ':' '\n' | grep -vE "/.rustup/|/.cargo/" | tr '\n' ':' | sed 's/:$//')
# Define alias of `light` to use the CLI built from source.
alias light="${LIGHT_PROTOCOL_TOPLEVEL}/cli/test_bin/run"
# Define GOROOT for Go.
export GOROOT="${LIGHT_PROTOCOL_TOPLEVEL}/.local/go"
# Ensure Rust binaries are in PATH
PATH="${CARGO_HOME}/bin:${PATH}"
# Export the modified PATH
export PATH
if [[ "$(uname)" == "Darwin" ]]; then
LIGHT_PROTOCOL_OLD_CPATH="${CPATH:-}"
export CPATH="/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include:${CPATH:-}"
fi
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/install.sh
|
#!/usr/bin/env bash
set -euo pipefail
PREFIX="${PWD}/.local"
INSTALL_LOG="${PREFIX}/.install_log"
# Versions
VERSIONS=(
"go:1.21.7"
"node:20.9.0"
"pnpm:9.5.0"
"solana:1.18.22"
"anchor:anchor-v0.29.0"
"jq:jq-1.7.1"
"photon:0.50.0"
)
# Architecture-specific suffixes
SUFFIXES=(
"go_Darwin_x86_64:darwin-amd64"
"go_Darwin_arm64:darwin-arm64"
"go_Linux_x86_64:linux-amd64"
"go_Linux_aarch64:linux-arm64"
"node_Darwin_x86_64:darwin-x64"
"node_Darwin_arm64:darwin-arm64"
"node_Linux_x86_64:linux-x64"
"node_Linux_aarch64:linux-arm64"
"pnpm_Darwin_x86_64:macos-x64"
"pnpm_Darwin_arm64:macos-arm64"
"pnpm_Linux_x86_64:linuxstatic-x64"
"pnpm_Linux_aarch64:linuxstatic-arm64"
"solana_Darwin_x86_64:x86_64-apple-darwin"
"solana_Darwin_arm64:aarch64-apple-darwin"
"solana_Linux_x86_64:x86_64-unknown-linux-gnu"
"solana_Linux_aarch64:aarch64-unknown-linux-gnu"
"anchor_Darwin_x86_64:macos-amd64"
"anchor_Darwin_arm64:macos-arm64"
"anchor_Linux_x86_64:linux-amd64"
"anchor_Linux_aarch64:linux-arm64"
"jq_Darwin_x86_64:jq-osx-amd64"
"jq_Darwin_arm64:jq-macos-arm64"
"jq_Linux_x86_64:jq-linux-amd64"
"jq_Linux_aarch64:jq-linux-arm64"
)
OS=$(uname)
ARCH=$(uname -m)
log() { echo "$1" >> "$INSTALL_LOG"; }
is_installed() { grep -q "^$1$" "$INSTALL_LOG" 2>/dev/null; }
get_version() {
local key=$1
for item in "${VERSIONS[@]}"; do
IFS=':' read -r k v <<< "$item"
if [ "$k" = "$key" ]; then
echo "$v"
return
fi
done
echo "unknown"
}
get_suffix() {
local key="${1}_${OS}_${ARCH}"
for item in "${SUFFIXES[@]}"; do
IFS=':' read -r k v <<< "$item"
if [ "$k" = "$key" ]; then
echo "$v"
return
fi
done
echo "unknown"
}
download() {
curl -sSL --retry 5 --retry-delay 10 -o "$2" "$1"
chmod +x "$2"
}
install_go() {
if ! is_installed "go"; then
echo "Installing Go..."
local version=$(get_version "go")
local suffix=$(get_suffix "go")
local url="https://go.dev/dl/go${version}.${suffix}.tar.gz"
download "$url" "${PREFIX}/go.tar.gz"
tar -xzf "${PREFIX}/go.tar.gz" -C "${PREFIX}"
rm "${PREFIX}/go.tar.gz"
log "go"
fi
}
install_rust() {
if ! is_installed "rust"; then
echo "Installing Rust..."
export RUSTUP_HOME="${PREFIX}/rustup"
export CARGO_HOME="${PREFIX}/cargo"
curl --retry 5 --retry-delay 10 --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path
export PATH="${PREFIX}/cargo/bin:${PATH}"
rustup component add clippy rustfmt
cargo install cargo-expand wasm-pack
cargo install photon-indexer --version $(get_version "photon") --locked
log "rust"
fi
}
install_node() {
if ! is_installed "node"; then
echo "Installing Node.js..."
local version=$(get_version "node")
local suffix=$(get_suffix "node")
local url="https://nodejs.org/dist/v${version}/node-v${version}-${suffix}.tar.gz"
download "$url" "${PREFIX}/node.tar.gz"
tar -xzf "${PREFIX}/node.tar.gz" -C "${PREFIX}" --strip-components 1
rm "${PREFIX}/node.tar.gz"
log "node"
fi
}
install_pnpm() {
if ! is_installed "pnpm"; then
echo "Installing pnpm..."
local version=$(get_version "pnpm")
local suffix=$(get_suffix "pnpm")
local url="https://github.com/pnpm/pnpm/releases/download/v${version}/pnpm-${suffix}"
download "$url" "${PREFIX}/bin/pnpm"
chmod +x "${PREFIX}/bin/pnpm"
log "pnpm"
fi
}
install_solana() {
if ! is_installed "solana"; then
echo "Installing Solana..."
local version=$(get_version "solana")
local suffix=$(get_suffix "solana")
local url="https://github.com/anza-xyz/agave/releases/download/v${version}/solana-release-${suffix}.tar.bz2"
download "$url" "${PREFIX}/solana-release.tar.bz2"
tar -xjf "${PREFIX}/solana-release.tar.bz2" -C "${PREFIX}/bin" --strip-components 2
rm "${PREFIX}/solana-release.tar.bz2"
log "solana"
fi
}
install_anchor() {
if ! is_installed "anchor"; then
echo "Installing Anchor..."
local version=$(get_version "anchor")
local suffix=$(get_suffix "anchor")
local url="https://github.com/Lightprotocol/binaries/releases/download/${version}/anchor-${suffix}"
download "$url" "${PREFIX}/bin/anchor"
log "anchor"
fi
}
install_jq() {
if ! is_installed "jq"; then
echo "Installing jq..."
local version=$(get_version "jq")
local suffix=$(get_suffix "jq")
local url="https://github.com/jqlang/jq/releases/download/${version}/${suffix}"
download "$url" "${PREFIX}/bin/jq"
log "jq"
fi
}
download_gnark_keys() {
local key_type="${1:-light}"
if ! is_installed "gnark_keys"; then
echo "Downloading gnark keys..."
ROOT_DIR="$(git rev-parse --show-toplevel)"
"${ROOT_DIR}/light-prover/scripts/download_keys.sh" "$key_type"
log "gnark_keys"
fi
}
install_dependencies() {
if ! is_installed "dependencies"; then
echo "Installing dependencies..."
export PATH="${PREFIX}/bin:${PATH}"
pnpm install
log "dependencies"
fi
}
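# Usage: ./scripts/install.sh [--full-keys] [--no-reset]
#   --full-keys  request the "full" gnark key set instead of the default "light" one
#   --no-reset   keep the install log so already-installed components are skipped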
main() {
mkdir -p "${PREFIX}/bin"
# Parse command line arguments
local key_type="light"
local reset_log=true
while [[ $# -gt 0 ]]; do
case $1 in
--full-keys)
key_type="full"
shift
;;
--no-reset)
reset_log=false
shift
;;
*)
echo "Unknown option: $1"
exit 1
;;
esac
done
if $reset_log; then
rm -f "$INSTALL_LOG"
fi
install_go
install_rust
install_node
install_pnpm
install_solana
install_anchor
install_jq
download_gnark_keys "$key_type"
install_dependencies
echo "✨ Light Protocol development dependencies installed"
}
main "$@"
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/lint.sh
|
#!/usr/bin/env bash
set -e
npx nx run-many --target=format:check --all
npx nx run-many --target=lint --all
cargo fmt --all -- --check
cargo clippy \
--workspace \
--exclude photon-api \
--exclude name-service \
--exclude mixed-accounts \
-- -A clippy::result_large_err \
-A clippy::empty-docs \
-A clippy::to-string-trait-impl \
-D warnings
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/deploy-devnet.sh
|
#!/usr/bin/env bash
# Assumes that programs have been built with build-verifiable.sh.
# Creates buffer accounts.
# Buffer account addresses can be used in a multisig action.
# Array of program names
libraries=("account_compression" "light_compressed_token" "light_system_program" "light_registry")
BUFFER_KEYPAIR_PATH="target/buffer"
create_buffer_account() {
local max_retries=5
local attempt=1
local program_name="$1"
local program_name_keypair="$2"
while (( attempt <= max_retries )); do
echo "Attempt $attempt of $max_retries..."
echo "$BUFFER_KEYPAIR_PATH-$program_name_keypair.json"
buffer_pubkey=$(solana-keygen pubkey "target/deploy/$program_name-keypair.json")
echo "Buffer pubkey for $program_name: $buffer_pubkey"
if solana program deploy target/deploy/"$program_name".so --program-id $buffer_pubkey --buffer "$BUFFER_KEYPAIR_PATH-$program_name_keypair-keypair.json" --upgrade-authority ../../Downloads/87-id.json; then
echo "Command succeeded on attempt $attempt."
return 0
else
echo "Command failed on attempt $attempt."
((attempt++))
sleep 2
fi
done
echo "Command failed after $max_retries attempts."
return 1
}
# Iterate over each program name, create its buffer keypair if needed, and deploy it
for program_name in "${libraries[@]}"; do
if [[ ! -f "$BUFFER_KEYPAIR_PATH" ]]; then
solana-keygen new --outfile "$BUFFER_KEYPAIR_PATH-$program_name-keypair.json" --no-bip39-passphrase
fi
create_buffer_account "$program_name" "$program_name"
done
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/release-all-rust-crates.sh
|
#!/usr/bin/env bash
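# Usage: CRATES_IO_TOKEN=<token> ./scripts/release-all-rust-crates.sh [--programs] [--sdks]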
# Configuration
CRATES_IO_TOKEN=${CRATES_IO_TOKEN}
# Ensure cargo, git, and gh are installed
command -v cargo >/dev/null 2>&1 || { echo >&2 "Cargo is not installed. Aborting."; exit 1; }
command -v git >/dev/null 2>&1 || { echo >&2 "Git is not installed. Aborting."; exit 1; }
command -v gh >/dev/null 2>&1 || { echo >&2 "GitHub CLI is not installed. Aborting."; exit 1; }
# Parse command line arguments
RELEASE_PROGRAMS=false
RELEASE_SDKS=false
while [[ "$#" -gt 0 ]]; do
case $1 in
--programs) RELEASE_PROGRAMS=true ;;
--sdks) RELEASE_SDKS=true ;;
*) echo "Unknown parameter passed: $1"; exit 1 ;;
esac
shift
done
if [ "$RELEASE_PROGRAMS" = false ] && [ "$RELEASE_SDKS" = false ]; then
echo "Please specify --programs or --sdks (or both)"
exit 1
fi
echo "Logging in to crates.io..."
cargo login "${CRATES_IO_TOKEN}"
PROGRAMS=("aligned-sized" "light-heap" "light-bounded-vec" "light-utils" "light-hasher" "light-macros" "light-hash-set" "light-merkle-tree-reference" "light-concurrent-merkle-tree" "light-indexed-merkle-tree" "light-prover-client" "light-verifier" "account-compression" "light-system-program" "light-registry" "light-compressed-token")
SDKS=("photon-api" "forester-utils" "light-test-utils" "light-sdk-macros" "light-sdk")
release_packages() {
local packages=("$@")
for PACKAGE in "${packages[@]}"; do
PKG_VERSION=$(cargo pkgid -p "$PACKAGE" | cut -d "#" -f2)
VERSION=${PKG_VERSION#*@}
echo "Creating tag for Rust package: $PACKAGE v$VERSION"
git tag "${PACKAGE}-v${VERSION}"
git push origin "${PACKAGE}-v${VERSION}"
for attempt in {1..2}; do
echo "Attempt $attempt: Publishing $PACKAGE..."
cargo release publish --package "$PACKAGE" --execute --no-confirm && break || echo "Attempt $attempt failed, retrying in 10..."
sleep 10
done
done
}
if [ "$RELEASE_PROGRAMS" = true ]; then
echo "Releasing programs..."
release_packages "${PROGRAMS[@]}"
fi
if [ "$RELEASE_SDKS" = true ]; then
echo "Releasing SDKs..."
release_packages "${SDKS[@]}"
fi
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/push-stateless-js-idls.sh
|
#!/usr/bin/env bash
cd "$(git rev-parse --show-toplevel)"
SOURCE_DIR="./target/types"
DEST_DIR="./js"
DEST_DIR_STATELESS="$DEST_DIR/stateless.js/src/idls"
FILES_TO_COPY=(
"account_compression.ts"
"light_system_program.ts"
"light_registry.ts"
)
# copy each type file into the respective location
for FILE in "${FILES_TO_COPY[@]}"; do
if [ ! -f "$SOURCE_DIR/$FILE" ]; then
echo "Error: $FILE not found."
exit 1
else
cp "$SOURCE_DIR/$FILE" $DEST_DIR_STATELESS
fi
done
echo "IDL type files pushed to directories."
export COREPACK_ENABLE_STRICT=0
# fmt
if ! command -v pnpm prettier &> /dev/null
then
echo "Prettier could not be found. Please install Prettier to run this script."
exit 1
fi
for FILE in "${FILES_TO_COPY[@]}"; do
pnpm prettier --write "$DEST_DIR_STATELESS/$FILE"
done
echo "IDL type files formatted succesfully."
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/build.sh
|
#!/usr/bin/env bash
command -v pnpm >/dev/null 2>&1 || { echo >&2 "pnpm is not installed. Aborting."; exit 1; }
command -v npx >/dev/null 2>&1 || { echo >&2 "npx is not installed. Aborting."; exit 1; }
. "./scripts/devenv.sh" || { echo >&2 "Failed to source devenv.sh. Aborting."; exit 1; }
set -eux
pnpm install || { echo >&2 "Failed to install dependencies. Aborting."; exit 1; }
if [ ! -f target/deploy/spl_noop.so ]; then
mkdir -p target/deploy && cp third-party/solana-program-library/spl_noop.so target/deploy
fi
npx nx run-many --target=build --all
echo "Build process completed successfully."
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/clean.sh
|
#!/usr/bin/env bash
find . -type d -name "test-ledger" -exec sh -c 'echo "Deleting {}"; rm -rf "{}"' \;
npx nx reset
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/deploy.sh
|
#!/usr/bin/env bash
# Assumes that programs have been built with build-verifiable.sh.
# Creates buffer accounts.
# Buffer account addresses can be used in a multisig action.
# Array of program names
libraries=("account_compression" "light_compressed_token" "light_system_program" "light_registry")
BUFFER_KEYPAIR_PATH="target/buffer"
create_buffer_account() {
local max_retries=5
local attempt=1
local program_name="$1"
local program_name_keypair="$2"
while (( attempt <= max_retries )); do
echo "Attempt $attempt of $max_retries..."
echo "$BUFFER_KEYPAIR_PATH-$program_name_keypair.json"
if solana program write-buffer target/deploy/"$program_name".so --buffer "$BUFFER_KEYPAIR_PATH-$program_name_keypair-keypair.json"; then
echo "Command succeeded on attempt $attempt."
return 0
else
echo "Command failed on attempt $attempt."
((attempt++))
sleep 2
fi
done
echo "Command failed after $max_retries attempts."
return 1
}
# Iterate over each program name, write its buffer account, and set the buffer authority
for program_name in "${libraries[@]}"; do
if [[ ! -f "$BUFFER_KEYPAIR_PATH" ]]; then
solana-keygen new --outfile "$BUFFER_KEYPAIR_PATH-$program_name-keypair.json" --no-bip39-passphrase
fi
create_buffer_account "$program_name" "$program_name"
buffer_pubkey=$(solana-keygen pubkey "$BUFFER_KEYPAIR_PATH-$program_name-keypair.json")
echo "Buffer pubkey for $program_name: $buffer_pubkey"
solana program set-buffer-authority "$buffer_pubkey" --new-buffer-authority 7PeqkcCXeqgsp5Mi15gjJh8qvSLk7n3dgNuyfPhJJgqY
echo "Buffer created and authority set for $program_name"
done
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/tsc-create-r1cs.sh
|
#!/usr/bin/env bash
# Ensure we're working from the root directory of the monorepo
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
REPO_ROOT="$( cd "$SCRIPT_DIR/.." && pwd )"
SEMAPHORE_MTB_SETUP="../semaphore-mtb-setup/semaphore-mtb-setup"
cd "$REPO_ROOT"
# Get phase 1 ptau file.
# TODO: fix when extracting keys again
# echo "Performing pre-steps..."
# cd ..
# git clone https://github.com/worldcoin/semaphore-mtb-setup
# cd semaphore-mtb-setup && go build -v
# wget https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_17.ptau 17.ptau
# mkdir -p "$REPO_ROOT/ceremony"
# $SEMAPHORE_MTB_SETUP p1i 17.ptau "$REPO_ROOT/ceremony/17.ph1"
cd "$REPO_ROOT"
# Set the output directory
OUTPUT_DIR="$REPO_ROOT/ceremony/r1cs"
# Delete prior contents if the directory exists
if [ -d "$OUTPUT_DIR" ]; then
rm -rf "$OUTPUT_DIR"/*
fi
# Create the r1cs directory
mkdir -p "$OUTPUT_DIR"
# Function to generate R1CS for a given circuit type and parameters
generate_r1cs() {
local circuit=$1
local inclusion_accounts=$2
local non_inclusion_accounts=$3
local height=$4
local output_file=$5
./light-prover/light-prover r1cs \
--circuit "$circuit" \
--inclusion-compressed-accounts "$inclusion_accounts" \
--non-inclusion-compressed-accounts "$non_inclusion_accounts" \
--inclusion-tree-height "$height" \
--non-inclusion-tree-height "$height" \
--output "$OUTPUT_DIR/$output_file"
echo "Generated $output_file"
}
PH2_DIR="$REPO_ROOT/../experiment/gnark-mt-setup/contributions/0016_badcryptobitch/"
# Generate R1CS for inclusion circuits
for accounts in 1 2 3 4 8; do
generate_r1cs "inclusion" "$accounts" "0" "26" "inclusion_26_${accounts}_contribution_0.r1cs"
./../semaphore-mtb-setup/semaphore-mtb-setup p2n "$REPO_ROOT/ceremony/17.ph1" "$OUTPUT_DIR/inclusion_26_${accounts}_contribution_0.r1cs" "$OUTPUT_DIR/inclusion_26_${accounts}_dummy.ph1"
./../semaphore-mtb-setup/semaphore-mtb-setup key "${PH2_DIR}inclusion_26_${accounts}_badcryptobitch_contribution_16.ph2"
./light-prover/light-prover import-setup --circuit "inclusion" --inclusion-compressed-accounts "$accounts" --inclusion-tree-height 26 --pk ./../pk --vk ./../vk --output ./light-prover/proving-keys/inclusion_26_${accounts}.key
done
# Generate R1CS for non-inclusion circuits
for accounts in 1 2; do
generate_r1cs "non-inclusion" "0" "$accounts" "26" "non-inclusion_26_${accounts}_contribution_0.r1cs"
./../semaphore-mtb-setup/semaphore-mtb-setup p2n "$REPO_ROOT/ceremony/17.ph1" "$OUTPUT_DIR/non-inclusion_26_${accounts}_contribution_0.r1cs" "$OUTPUT_DIR/non_inclusion_26_${accounts}_dummy.ph1"
./../semaphore-mtb-setup/semaphore-mtb-setup key "${PH2_DIR}non-inclusion_26_${accounts}_badcryptobitch_contribution_16.ph2"
./light-prover/light-prover import-setup --circuit "non-inclusion" --non-inclusion-compressed-accounts "$accounts" --non-inclusion-tree-height 26 --pk ./../pk --vk ./../vk --output ./light-prover/proving-keys/non-inclusion_26_${accounts}.key
done
# Generate R1CS for combined circuits
for inclusion_accounts in 2 3 4; do
for non_inclusion_accounts in 1 2; do
generate_r1cs "combined" "$inclusion_accounts" "$non_inclusion_accounts" "26" "combined_26_${inclusion_accounts}_${non_inclusion_accounts}_contribution_0.r1cs"
"$SEMAPHORE_MTB_SETUP" p2n "$REPO_ROOT/ceremony/17.ph1" "$OUTPUT_DIR/combined_26_${inclusion_accounts}_${non_inclusion_accounts}_contribution_0.r1cs" "$OUTPUT_DIR/combined_26_${inclusion_accounts}_${non_inclusion_accounts}_dummy.ph1"
"$SEMAPHORE_MTB_SETUP" key "${PH2_DIR}combined_26_${inclusion_accounts}_${non_inclusion_accounts}_badcryptobitch_contribution_16.ph2"
./light-prover/light-prover import-setup --circuit "combined" --inclusion-compressed-accounts "$inclusion_accounts" --inclusion-tree-height 26 --non-inclusion-compressed-accounts "$non_inclusion_accounts" --non-inclusion-tree-height 26 --pk ./../pk --vk ./../vk --output ./light-prover/proving-keys/combined_26_${inclusion_accounts}_${non_inclusion_accounts}.key
done
done
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/test.sh
|
#!/usr/bin/env bash
. "./scripts/devenv.sh" || { echo >&2 "Failed to source devenv.sh. Aborting."; exit 1; }
set -eux
npx nx run-many --target=test --all --parallel=false
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/build-verifiable.sh
|
#!/usr/bin/env bash
# Builds the programs verifiably using the solana-verify CLI
solana-verify build --library-name account_compression &&\
solana-verify build --library-name light_compressed_token &&\
solana-verify build --library-name light_system_program &&\
solana-verify build --library-name light_registry
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/coverage.sh
|
#!/usr/bin/env bash
. "./scripts/devenv.sh" || { echo >&2 "Failed to source devenv.sh. Aborting."; exit 1; }
set -eux
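# Requires the cargo-llvm-cov subcommand (typically installed via `cargo install cargo-llvm-cov`).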
export RUST_MIN_STACK=8388608
export RUSTFLAGS="-D warnings"
ROOT_DIR=$(git rev-parse --show-toplevel)
cargo llvm-cov \
--all-targets --workspace \
--exclude light-concurrent-merkle-tree \
--exclude photon-api \
--exclude forester \
--html \
--output-dir "${ROOT_DIR}/target/llvm-cov" \
--open
cargo llvm-cov \
--all-targets \
--package light-concurrent-merkle-tree \
--html \
--output-dir "${ROOT_DIR}/target/llvm-cov-cmt" \
--open
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/bump-versions-and-publish-npm.sh
|
#!/usr/bin/env bash
cd "$(git rev-parse --show-toplevel)"
if ! command -v pnpm &> /dev/null; then
echo "pnpm is not installed. Please install pnpm first."
exit 1
fi
get_package_dir() {
case "$1" in
"@lightprotocol/hasher.rs") echo "hasher.rs" ;;
"@lightprotocol/stateless.js") echo "js/stateless.js" ;;
"@lightprotocol/compressed-token") echo "js/compressed-token" ;;
"@lightprotocol/zk-compression-cli") echo "cli" ;;
*) echo "" ;;
esac
}
# Bump version and publish
publish_package() {
local package_name=$1
local version_type=$2
local package_dir=$(get_package_dir "$package_name")
if [ -z "$package_dir" ]; then
echo "No directory mapping found for package $package_name."
return 1
fi
echo "Publishing ${package_name} in directory ${package_dir} with a ${version_type} version bump..."
# set exec permissions
find "cli/bin" -type f -exec chmod +x {} +
sleep 5
if ! (cd "${package_dir}" && pnpm version "${version_type}" && pnpm publish --access public --no-git-checks); then
echo "Error occurred while publishing ${package_name}."
return 1
fi
}
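# Example invocations (illustrative; package names must match the mapping in get_package_dir):
#   ./scripts/bump-versions-and-publish-npm.sh                # patch-bump and publish all packages
#   ./scripts/bump-versions-and-publish-npm.sh minor @lightprotocol/stateless.js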
# Defaults to 'patch' if no version type is provided
version_type=${1:-patch}
if [ "$#" -gt 0 ]; then shift; fi # Remove the version-type arg if one was given
error_occurred=0
if [ "$#" -eq 0 ]; then
echo "Bumping ${version_type} version for all packages..."
if ! pnpm -r exec -- pnpm version "${version_type}" || ! pnpm -r exec -- pnpm publish --access public; then
echo "Error occurred during bulk version bump and publish."
error_occurred=1
fi
else
# If specific packages are provided, bump version for those packages
for package_name in "$@"; do
if ! publish_package "${package_name}" "${version_type}"; then
error_occurred=1
fi
done
fi
if [ "$error_occurred" -eq 1 ]; then
echo "NPM release process completed with errors."
else
echo "NPM release process completed successfully."
fi
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/scripts/format.sh
|
#!/usr/bin/env bash
set -e
npx nx run-many --target=format --all
npx nx run-many --target=lint:fix --all
cargo fmt --all
cargo clippy \
--workspace \
--exclude name-service \
--exclude photon-api \
-- -A clippy::result_large_err \
-A clippy::empty-docs \
-A clippy::to-string-trait-impl \
-A unexpected-cfgs \
-A clippy::doc_lazy_continuation \
-D warnings
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/vitest.node.config.ts
|
import { defineConfig, mergeConfig } from 'vitest/config';
import viteConfig from './vitest.config';
export default mergeConfig(viteConfig, defineConfig({
resolve: {
conditions: ["node-addons"]
},
}));
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/rollup.config.js
|
import typescript from "@rollup/plugin-typescript";
import { wasm } from "@rollup/plugin-wasm";
import pkg from "./package.json";
import copy from "rollup-plugin-copy";
const outdir = (fmt, platform, inline) => {
return `${platform}${inline ? `-${inline}` : ""}/${fmt}`;
};
const rolls = (fmt, platform, inline) => ({
input: `src/main/index_${platform}${inline ? `_${inline}` : ""}.ts`,
output: {
dir: "dist",
format: fmt,
entryFileNames: `${outdir(fmt, platform, inline)}/[name].${
fmt === "cjs" ? "cjs" : "js"
}`,
name: pkg.name,
globals: {
os: "os",
"@coral-xyz/anchor": "anchor",
},
},
external: ["os", "@coral-xyz/anchor"],
plugins: [
inline !== "slim" && wasm({ targetEnv: "auto-inline" }),
typescript({
target: fmt === "es" ? "ES2022" : "ES2017",
outDir: `dist/${outdir(fmt, platform, inline)}`,
rootDir: "src",
}),
/// Note: this is a temporary hack that copies the wasm files into the dist
/// folder, which lets `stateless.js` copy them into its own dist so that
/// web apps can consume them. We will remove this once we extract the
/// test-helpers pkg into its own library to stop the bloat; that in turn
/// depends on fixing the photon indexer to return correct merkle proofs
/// so that `stateless.js` doesn't need its own hasher. Long term, we need
/// to optimize our hasher library regardless, to support custom hashing
/// schemas more efficiently.
copy({
targets: [
{
src: "src/main/wasm/light_wasm_hasher_bg.wasm",
dest: "dist/",
},
{
src: "src/main/wasm-simd/hasher_wasm_simd_bg.wasm",
dest: "dist/",
},
],
}),
],
});
export default [
rolls("umd", "browser", "fat"),
rolls("cjs", "browser", "fat"),
rolls("es", "browser", "fat"),
rolls("cjs", "browser", "slim"),
rolls("es", "browser", "slim"),
];
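// Resulting bundle layout (derived from `rolls`/`outdir` above, for orientation):
//   dist/browser-fat/umd, dist/browser-fat/cjs, dist/browser-fat/es   (wasm inlined)
//   dist/browser-slim/cjs, dist/browser-slim/es                       (wasm loaded separately)
// plus the two .wasm files copied into dist/ by rollup-plugin-copy.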
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/package.json
|
{
"name": "@lightprotocol/hasher.rs",
"version": "0.2.0",
"description": "",
"keywords": [],
"author": "",
"license": "ISC",
"sideEffects": false,
"type": "module",
"main": "./dist/browser-fat/umd/index_browser_fat.js",
"module": "./dist/browser-fat/es/index_browser_fat.js",
"exports": {
".": {
"types": "./dist/types/main/index_browser_fat.d.ts",
"import": "./dist/browser-fat/es/index_browser_fat.js",
"default": "./dist/browser-fat/cjs/index_browser_fat.cjs"
},
"./slim": {
"types": "./dist/types/main/index_browser_slim.d.ts",
"import": "./dist/browser-slim/es/index_browser_slim.js",
"default": "./dist/browser-slim/cjs/index_browser_slim.cjs"
},
"./sisd.wasm": "./dist/hasher_wasm_bg.wasm",
"./simd.wasm": "./dist/hasher_wasm_simd_bg.wasm",
"./package.json": "./package.json"
},
"types": "./dist/types/main/index_browser_fat.d.ts",
"files": [
"dist"
],
"scripts": {
"build:wasm": "wasm-pack build -t web --out-dir ../main/wasm src/wasm",
"build:wasm-simd": "RUSTFLAGS='-C target-feature=+simd128' npm run build:wasm-simd-ci",
"build:wasm-simd-ci": "wasm-pack build -t web --out-name hasher_wasm_simd --out-dir ../main/wasm-simd src/wasm",
"build:bundle": "rollup -c --bundleConfigAsCjs",
"build": "rm -rf dist/ && pnpm build:wasm && pnpm build:wasm-simd && pnpm build:bundle",
"test": "pnpm test:native",
"test:types": "tsc",
"test:native": "vitest run --config vitest.node.config.ts --dir tests",
"test:browser": "vitest run --config vitest.config.ts --browser.name=firefox --browser.provider=playwright --browser.headless --dir tests",
"test-wasm": "cd zk-rs && cargo test -- --nocapture",
"bench-blake": "node_modules/.bin/tsx ./benches/blake.test.ts",
"bench-poseidon": "node_modules/.bin/tsx ./benches/poseidon.test.ts",
"bench": "pnpm run bench-blake && pnpm run bench-poseidon"
},
"devDependencies": {
"@coral-xyz/anchor": "0.29.0",
"@lightprotocol/tsconfig": "workspace:*",
"@noble/hashes": "^1.3.2",
"@rollup/plugin-node-resolve": "^15.2.3",
"@rollup/plugin-typescript": "^11.1.5",
"@rollup/plugin-wasm": "^6.2.2",
"@types/chai": "^4.3.16",
"@types/mocha": "^10.0.7",
"@vitest/browser": "^1.6.0",
"circomlibjs": "^0.1.7",
"ffjavascript": "^0.3.0",
"mocha": "^10.6.0",
"playwright": "^1.45.1",
"rollup": "^4.6.1",
"rollup-plugin-copy": "^3.5.0",
"tinybench": "^2.5.1",
"ts-mocha": "^10.0.0",
"ts-node": "^10.9.2",
"tsx": "^4.1.2",
"tweetnacl": "^1.0.3",
"vite-plugin-env-compatible": "^1.1.1",
"vitest": "^1.6.0"
},
"nx": {
"targets": {
"build": {
"outputs": [
"{workspaceRoot}/dist"
]
}
}
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/tsconfig.json
|
{
"$schema": "https://json.schemastore.org/tsconfig",
"compilerOptions": {
"target": "ES2022",
"noEmit": true,
"module": "ES2022",
"lib": ["dom", "ES2022"],
"types": ["vitest/globals"],
"declaration": true,
"declarationDir": "dist/types",
"strict": true,
"moduleResolution": "node",
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true
},
"extends": "@lightprotocol/tsconfig/base.json",
"include": ["src", "tests"]
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/tsconfig.benches.json
|
{
"$schema": "https://json.schemastore.org/tsconfig",
"compilerOptions": {
"outDir": "./lib",
"rootDirs": ["src", "benches"],
"baseUrl": "src",
"esModuleInterop": true,
"target": "ESNext",
"module": "ESNext"
},
"extends": "tsconfig.json",
"include": ["benches"]
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/vitest.config.ts
|
import { defineConfig } from 'vitest/config'
export default defineConfig({
test: {
globals: true,
},
});
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/tests/wasm.test.ts
|
import { blake2b } from "@noble/hashes/blake2b";
import { WasmFactory } from "..";
import {BN} from "@coral-xyz/anchor";
function isNode() {
return (
Object.prototype.toString.call(
typeof process !== "undefined" ? process : 0
) === "[object process]"
);
}
describe("Test hasher", () => {
beforeEach(() => {
WasmFactory.resetModule();
});
it("Test poseidon216", async () => {
let input = new BN([
216, 137, 85, 159, 239, 194, 107, 138,
254, 68, 21, 16, 165, 41, 64, 148,
208, 198, 201, 59, 220, 102, 142, 81,
49, 251, 174, 183, 183, 182, 4, 32
]);
input = new BN(new Uint8Array(input.toArray()).slice(1, 32), undefined, "be");
const expected = [39,11,135,4,126,124,21,55,122,162,99,228,196,251,107,128,181,191,102,183,35,64,122,163,42,155,219,100,30,89,203,0];
const mod = await WasmFactory.loadModule();
const hash = mod.create();
const wasmOutput = new BN(hash.poseidonHash([input])).toArray("be", 32);
assert.equal(wasmOutput.toString(), expected.toString());
});
it("Test blake2-simd", async () => {
const input = "foobar";
const tsBlake = blake2b.create({ dkLen: 32 }).update(input).digest().toString()
const mod = await WasmFactory.loadModule();
const hash = mod.create();
const wasmBlake = hash.blakeHash(input, 32).toString();
assert.equal(tsBlake, wasmBlake);
})
it("Test Poseidon", async () => {
const inputs = new BN(1).toString();
const expectedHash = [
41, 23, 97, 0, 234, 169, 98, 189,
193, 254, 108, 101, 77, 106, 60, 19,
14, 150, 164, 209, 22, 139, 51, 132,
139, 137, 125, 197, 2, 130, 1, 51
];
const mod = await WasmFactory.loadModule();
const hash = mod.create();
const rsHash = hash.poseidonHash([inputs]);
assert.equal(expectedHash.toString(), rsHash.toString());
});
it("Test Poseidon 1..12", async() => {
let TEST_CASES = [
[
41, 23, 97, 0, 234, 169, 98, 189, 193, 254, 108, 101, 77, 106, 60, 19, 14, 150, 164, 209,
22, 139, 51, 132, 139, 137, 125, 197, 2, 130, 1, 51,
],
[
0, 122, 243, 70, 226, 211, 4, 39, 158, 121, 224, 169, 243, 2, 63, 119, 18, 148, 167, 138,
203, 112, 231, 63, 144, 175, 226, 124, 173, 64, 30, 129,
],
[
2, 192, 6, 110, 16, 167, 42, 189, 43, 51, 195, 178, 20, 203, 62, 129, 188, 177, 182, 227,
9, 97, 205, 35, 194, 2, 177, 134, 115, 191, 37, 67,
],
[
8, 44, 156, 55, 10, 13, 36, 244, 65, 111, 188, 65, 74, 55, 104, 31, 120, 68, 45, 39, 216,
99, 133, 153, 28, 23, 214, 252, 12, 75, 125, 113,
],
[
16, 56, 150, 5, 174, 104, 141, 79, 20, 219, 133, 49, 34, 196, 125, 102, 168, 3, 199, 43,
65, 88, 156, 177, 191, 134, 135, 65, 178, 6, 185, 187,
],
[
42, 115, 246, 121, 50, 140, 62, 171, 114, 74, 163, 229, 189, 191, 80, 179, 144, 53, 215,
114, 159, 19, 91, 151, 9, 137, 15, 133, 197, 220, 94, 118,
],
[
34, 118, 49, 10, 167, 243, 52, 58, 40, 66, 20, 19, 157, 157, 169, 89, 190, 42, 49, 178,
199, 8, 165, 248, 25, 84, 178, 101, 229, 58, 48, 184,
],
[
23, 126, 20, 83, 196, 70, 225, 176, 125, 43, 66, 51, 66, 81, 71, 9, 92, 79, 202, 187, 35,
61, 35, 11, 109, 70, 162, 20, 217, 91, 40, 132,
],
[
14, 143, 238, 47, 228, 157, 163, 15, 222, 235, 72, 196, 46, 187, 68, 204, 110, 231, 5, 95,
97, 251, 202, 94, 49, 59, 138, 95, 202, 131, 76, 71,
],
[
46, 196, 198, 94, 99, 120, 171, 140, 115, 48, 133, 79, 74, 112, 119, 193, 255, 146, 96,
228, 72, 133, 196, 184, 29, 209, 49, 173, 58, 134, 205, 150,
],
[
0, 113, 61, 65, 236, 166, 53, 241, 23, 212, 236, 188, 235, 95, 58, 102, 220, 65, 66, 235,
112, 181, 103, 101, 188, 53, 143, 27, 236, 64, 187, 155,
],
[
20, 57, 11, 224, 186, 239, 36, 155, 212, 124, 101, 221, 172, 101, 194, 229, 46, 133, 19,
192, 129, 193, 205, 114, 201, 128, 6, 9, 142, 154, 143, 190,
],
];
let inputs: BN[] = [];
let value: BN = new BN(1);
for (let i = 0; i < TEST_CASES.length; i++) {
inputs.push(value);
const mod = await WasmFactory.loadModule();
const hash = mod.create();
const rsHash = hash.poseidonHash(inputs);
assert.equal(TEST_CASES[i].toString(), Array.from(rsHash).toString());
}
inputs = [];
value = new BN(2);
for (let i = 0; i < TEST_CASES.length; i++) {
inputs.push(value);
const mod = await WasmFactory.loadModule();
const hash = mod.create();
const rsHash = hash.poseidonHash(inputs);
assert.notEqual(TEST_CASES[i].toString(), Array.from(rsHash).toString());
}
})
});
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/benches/poseidon.test.ts
|
import { poseidon } from "light-wasm";
import * as circomlibjs from "circomlibjs";
import { Bench } from 'tinybench';
const bench = new Bench({ time: 5000 });
const circomPoseidon = await circomlibjs.buildPoseidonOpt();
bench
.add('wasm poseidon', () => {
poseidon(["1"]);
})
.add('circom poseidon', async () => {
circomPoseidon(["1"]);
});
await bench.run();
console.table(bench.table());
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/benches/blake.test.ts
|
import { blake2b } from "@noble/hashes/blake2b";
import { Bench } from 'tinybench';
const bench = new Bench({ time: 1000 });
bench
.add('@noble/hashes/blake2b', async () => {
blake2b.create({ dkLen: 32 }).update("").digest();
});
await bench.run();
console.table(bench.table());
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/wasm/Cargo.toml
|
[package]
name = "light-wasm-hasher"
version = "1.1.0"
edition = "2021"
description = "WASM wrapper for blake2 and Poseidon hashing"
repository = "https://github.com/Lightprotocol/light-protocol"
license = "Apache-2.0"
[lib]
crate-type = ["cdylib", "rlib"]
[dependencies]
wasm-bindgen = "0.2.89"
light-poseidon = "0.2"
ark-bn254 = "0.4.0"
ark-ff = "0.4.0"
js-sys = "0.3.64"
web-sys = { version = "0.3.69", features = ['console'] }
hex = { version = "0.4.3", features = [] }
blake2b_simd = "1"
num-bigint = "0.4.6"
console_error_panic_hook = "0.1.7"
thiserror = "1.0.64"
[package.metadata.wasm-pack.profile.release]
wasm-opt = false
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/wasm/Cargo.lock
|
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "bumpalo"
version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "log"
version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
[[package]]
name = "once_cell"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "proc-macro2"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
[[package]]
name = "syn"
version = "2.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "wasm-bindgen"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd"
dependencies = [
"bumpalo",
"log",
"once_cell",
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
]
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
dependencies = [
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
[[package]]
name = "wasmbyexample"
version = "0.1.0"
dependencies = [
"wasm-bindgen",
]
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/wasm
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/wasm/src/lib.rs
|
mod hash;
pub fn set_panic_hook() {
    // `console_error_panic_hook` is a regular (non-optional) dependency of this
    // crate, so install the hook unconditionally; `set_once` is idempotent, so
    // repeated calls are harmless. With the hook installed, panics show up as
    // readable messages in the browser console.
    //
    // For more details see
    // https://github.com/rustwasm/console_error_panic_hook#readme
    console_error_panic_hook::set_once();
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/wasm/src
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/wasm/src/hash/blake2.rs
|
use blake2b_simd::Params;
use wasm_bindgen::prelude::wasm_bindgen;
#[wasm_bindgen(js_name = blake2str)]
pub fn blake2_string(input: String, hash_length: usize) -> Vec<u8> {
Params::new()
.hash_length(hash_length)
.hash(input.as_bytes())
.as_bytes()
.to_vec()
}
#[wasm_bindgen(js_name = blake2)]
pub fn blake2(input: &[u8], hash_length: usize) -> Vec<u8> {
Params::new()
.hash_length(hash_length)
.hash(input)
.as_bytes()
.to_vec()
}
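// Usage sketch (Rust side; illustrative): the requested hash length determines
// the digest size, e.g.
//   let digest = blake2(b"foobar", 32);
//   assert_eq!(digest.len(), 32);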
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn blake_4() {
let input: [u8; 64] = [
8, 11, 255, 174, 253, 221, 253, 111, 32, 197, 22, 38, 135, 201, 120, 114, 203, 112, 85,
63, 101, 26, 5, 118, 231, 206, 220, 12, 10, 137, 200, 136, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
];
let hash = blake2(&input, 4);
let expected_output: [u8; 4] = [55, 154, 4, 63];
assert_eq!(hash.as_slice(), &expected_output);
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/wasm/src
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/wasm/src/hash/mod.rs
|
pub mod blake2;
pub mod poseidon;
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/wasm/src
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/wasm/src/hash/poseidon.rs
|
use ark_bn254::Fr;
use js_sys::{Array, Uint8Array};
use light_poseidon::{Poseidon, PoseidonBytesHasher, PoseidonError};
use num_bigint::BigUint;
use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
use crate::set_panic_hook;
#[wasm_bindgen]
pub fn poseidon(inputs: &Array) -> Result<Uint8Array, JsValue> {
set_panic_hook();
let inputs_res: Result<Vec<Vec<u8>>, JsValue> = inputs
.iter()
.map(|val| {
if let Some(str_val) = val.as_string() {
let big_int = BigUint::parse_bytes(str_val.as_bytes(), 10)
.ok_or_else(|| JsValue::from_str("Error parsing string to BigUint"))?;
Ok(big_int.to_bytes_be())
} else {
Err(JsValue::from_str(
"All elements in the array should be strings representable as numbers",
))
}
})
.collect();
let binding = inputs_res?;
let binding = binding.iter().map(|val| val.as_slice()).collect::<Vec<_>>();
let hash_res = poseidon_hash(binding.as_slice());
match hash_res {
Ok(val) => {
let js_arr = Uint8Array::from(&val[..]);
Ok(js_arr)
}
Err(e) => Err(JsValue::from_str(&e.to_string())),
}
}
pub fn poseidon_hash(input: &[&[u8]]) -> Result<Vec<u8>, PoseidonError> {
let hasher = Poseidon::<Fr>::new_circom(input.len());
let hash = hasher?.hash_bytes_be(input);
Ok(hash?.to_vec())
}
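// Usage sketch (Rust side; illustrative): hash the 32-byte big-endian encoding
// of the field element 1. The resulting digest is always 32 bytes.
//   let one = [vec![0u8; 31], vec![1u8]].concat();
//   let digest = poseidon_hash(&[one.as_slice()]).unwrap();
//   assert_eq!(digest.len(), 32);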
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn poseidon_1() {
let hash_of_1: [u8; 32] = [
41, 23, 97, 0, 234, 169, 98, 189, 193, 254, 108, 101, 77, 106, 60, 19, 14, 150, 164,
209, 22, 139, 51, 132, 139, 137, 125, 197, 2, 130, 1, 51,
];
let input_of_1 = [vec![0u8; 31], vec![1u8]].concat();
let inputs = vec![input_of_1.as_slice()];
let hash = poseidon_hash(&inputs).unwrap();
assert_eq!(hash, hash_of_1.to_vec());
}
#[test]
fn poseidon_216() {
let inputs = vec![
216, 137, 85, 159, 239, 194, 107, 138, 254, 68, 21, 16, 165, 41, 64, 148, 208, 198,
201, 59, 220, 102, 142, 81, 49, 251, 174, 183, 183, 182, 4, 32,
];
let mut hasher = Poseidon::<Fr>::new_circom(1).unwrap();
let hash = hasher.hash_bytes_be(&[inputs.as_slice()]);
assert!(hash.is_err());
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/main/index_browser_fat.ts
|
export type { LightWasm, LightWasmCreator, HasherLoadOptions } from "./model.js";
export { WasmFactory, hasSimd as hasWasmSimd } from "./wasm.js";
import wasm from "./wasm/light_wasm_hasher_bg.wasm";
import wasmSimd from "./wasm-simd/hasher_wasm_simd_bg.wasm";
import { setWasmInit, setWasmSimdInit } from "./wasm.js";
// @ts-ignore
setWasmInit(() => wasm());
// @ts-ignore
setWasmSimdInit(() => wasmSimd());
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/main/index_node.ts
|
export type { LightWasm, LightWasmCreator, HasherLoadOptions } from "./model.js";
export { WasmFactory, hasSimd as hasWasmSimd } from "./wasm.js";
import wasm from "./wasm/light_wasm_hasher_bg.wasm";
import wasmSimd from "./wasm-simd/hasher_wasm_simd_bg.wasm";
import { setWasmInit, setWasmSimdInit } from "./wasm.js";
// @ts-ignore
setWasmInit(() => wasm());
// @ts-ignore
setWasmSimdInit(() => wasmSimd());
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/main/wasm.ts
|
import type {
LightWasmCreator,
LightWasm,
InitInput,
WasmInput,
HasherLoadOptions,
} from "./model.js";
import init, {
blake2 as blake2Wasm,
blake2str as blake2strWasm,
poseidon as poseidonWasm,
} from "./wasm/light_wasm_hasher";
import simdInit, {
blake2 as blake2Simd,
blake2str as blake2strSimd,
poseidon as poseidonSimd,
} from "./wasm-simd/hasher_wasm_simd";
import { BN } from "@coral-xyz/anchor";
function stringify(input: string[] | BN[]): string[] {
if (input.length > 0 && input[0] instanceof BN) {
return (input as BN[]).map((item) => item.toString(10));
} else {
return input as string[];
}
}
let wasmInit: (() => InitInput) | undefined = undefined;
export const setWasmInit = (arg: () => InitInput) => {
wasmInit = arg;
};
let wasmSimdInit: (() => InitInput) | undefined = undefined;
export const setWasmSimdInit = (arg: () => InitInput) => {
wasmSimdInit = arg;
};
const isWasmInput = (
x?: HasherLoadOptions["wasm"]
): x is WasmInput | undefined =>
x === undefined || (typeof x === "object" && "simd" in x);
/**
 * The hasher.rs hashing primitives implemented in WebAssembly.
 */
export class WasmFactory {
static async loadModule(
options?: Partial<HasherLoadOptions>
): Promise<LightWasmCreator> {
if (isWasmInput(options?.wasm)) {
const useSimd = options?.simd ?? hasSimd();
if (!useSimd) {
return await loadWasm(options?.wasm?.sisd);
} else {
return await loadWasmSimd(options?.wasm?.simd);
}
} else {
return await loadWasm(options?.wasm);
}
}
static async loadHasher(
options?: Partial<HasherLoadOptions>
): Promise<LightWasm> {
const module = await WasmFactory.loadModule(options);
return module.create();
}
static resetModule() {
sisdMemory = undefined;
simdMemory = undefined;
}
static async getInstance(): Promise<LightWasm> {
return (await WasmFactory.loadModule()).create();
}
}
interface HashStrategy {
blake2str(input: string, hash_length: number): Uint8Array;
blake2(input: Uint8Array, hash_length: number): Uint8Array;
poseidon(inputs: Array<any>): Uint8Array;
}
function wasmHasher(hasher: HashStrategy): LightWasmCreator {
  const WasmHasher = class implements LightWasm {
    blakeHash(input: string | Uint8Array, hashLength: number): Uint8Array {
      if (typeof input === "string") {
        return hasher.blake2str(input, hashLength);
      } else {
        return hasher.blake2(input, hashLength);
      }
    }
    poseidonHash(input: string[] | BN[]): Uint8Array {
      return hasher.poseidon(stringify(input));
    }
    poseidonHashBN(input: string[] | BN[]): BN {
      return new BN(this.poseidonHash(input));
    }
    poseidonHashString(input: string[] | BN[]): string {
      const bn = new BN(this.poseidonHash(input));
      return bn.toString();
    }
  };
  return {
    create: () => new WasmHasher(),
  };
}
let sisdMemory: Promise<LightWasmCreator> | undefined;
let simdMemory: Promise<LightWasmCreator> | undefined;
const loadWasmSimd = async (module?: InitInput) => {
if (simdMemory === undefined) {
simdMemory = simdInit(module ?? wasmSimdInit?.()).then((x) => {
return wasmHasher({
blake2str: blake2strSimd,
blake2: blake2Simd,
poseidon: poseidonSimd,
});
});
}
if (simdMemory === undefined) {
throw new Error("simdMemory is undefined");
}
return await simdMemory;
};
const loadWasm = async (module?: InitInput) => {
if (sisdMemory === undefined) {
sisdMemory = init(module ?? wasmInit?.()).then((x) => {
return wasmHasher({
blake2str: blake2strWasm,
blake2: blake2Wasm,
poseidon: poseidonWasm,
});
});
}
if (sisdMemory === undefined) {
throw new Error("sisdMemory is undefined");
}
return await sisdMemory;
};
// Extracted from the compiled file of:
// https://github.com/GoogleChromeLabs/wasm-feature-detect/blob/40269813c83f7e9ff370afc92cde3cc0456c557e/src/detectors/simd/module.wat
//
// Changes:
// - Validation is cached, so it needs to only run once
// - There's no need to mark as async
let simdEnabled: boolean | undefined;
export const hasSimd = () =>
simdEnabled ??
(simdEnabled = WebAssembly.validate(
new Uint8Array([
0, 97, 115, 109, 1, 0, 0, 0, 1, 5, 1, 96, 0, 1, 123, 3, 2, 1, 0, 10, 10,
1, 8, 0, 65, 0, 253, 15, 253, 98, 11,
])
));
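// Usage sketch (illustrative; assumes this module is consumed through the package
// entry points that re-export `WasmFactory`):
//   const hasher = await WasmFactory.loadHasher();   // picks the SIMD wasm when supported
//   const digest = hasher.poseidonHash(["1"]);       // Uint8Array (32 bytes)
//   const asBn = hasher.poseidonHashBN(["1"]);       // same digest wrapped in a BN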
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/main/model.ts
|
import { BN } from "@coral-xyz/anchor";
export interface LightWasm {
blakeHash(input: string | Uint8Array, hashLength: number): Uint8Array;
poseidonHash(input: string[] | BN[]): Uint8Array;
poseidonHashString(input: string[] | BN[]): string;
poseidonHashBN(input: string[] | BN[]): BN;
}
export interface LightWasmCreator {
create(): LightWasm;
}
/**
 * Parameters that can be used to instantiate the Wasm hasher
 */
export type InitInput =
| RequestInfo
| URL
| Response
| BufferSource
| WebAssembly.Module;
export type WasmInput = {
/** parameter that describes how to instantiate the non-SIMD enabled Wasm */
sisd: InitInput;
/** parameter that describes how to instantiate the SIMD enabled Wasm */
simd: InitInput;
};
/**
* Customize how modules are loaded
*/
export interface HasherLoadOptions {
/**
* Execute Hash with SIMD instructions. This option is only
* applicable in a Wasm environment, as native hardware will detect SIMD at
* runtime. `hasher.rs` will detect if Wasm SIMD is enabled if this
* option is not set, so this option is used to override the heuristic.
*/
simd?: boolean;
/**
* Controls how the Wasm module is instantiated. This option is only
* applicable in browser environments or for users that opt to use the Wasm
* hasher. If the `wasm` option is given a single instantiation parameter,
* there is no SIMD check performed.
*/
wasm?: WasmInput | InitInput;
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src
|
solana_public_repos/Lightprotocol/light-protocol/hasher.rs/src/main/index_browser_slim.ts
|
export type { LightWasm, LightWasmCreator, HasherLoadOptions } from "./model.js";
export { WasmFactory, hasSimd as hasWasmSimd } from "./wasm.js";
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/forester/Cargo.toml
|
[package]
name = "forester"
version = "1.1.0"
edition = "2021"
[dependencies]
config = "0.14.0"
anchor-lang = { workspace = true }
clap = {version = "4.5.9", features = ["derive", "env"]}
solana-sdk = { workspace = true }
solana-client = { workspace = true }
solana-transaction-status = { workspace = true }
solana-account-decoder = { workspace = true }
solana-rpc = { workspace = true }
solana-program = { workspace = true }
tiny-bip39 = "0.8.2"
chrono = "0.4.37"
time = "0.3.34"
account-compression = { workspace = true }
light-system-program = { path = "../programs/system", features = ["cpi"] }
light-hash-set = { workspace = true, features = ["solana"] }
light-hasher = { path = "../merkle-tree/hasher" }
light-concurrent-merkle-tree = {path = "../merkle-tree/concurrent"}
light-indexed-merkle-tree = { path = "../merkle-tree/indexed" }
light-merkle-tree-reference = { path = "../merkle-tree/reference" }
light-registry = { workspace = true}
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
tokio = { version = "1", features = ["full"] }
tokio-util = "0.7"
reqwest = { version = "0.11", features = ["json", "rustls-tls", "blocking"] }
futures = "0.3.31"
thiserror = "1"
borsh = "0.10.3"
bs58 = "0.5.1"
photon-api = { workspace=true }
bincode = "1.3"
sysinfo = "0.33"
forester-utils = { workspace=true }
env_logger = "0.11"
rand = "0.8.5"
dotenvy = "0.15.7"
crossbeam-channel = "0.5.12"
tokio-stream = "0.1.16"
base64 = "0.22.0"
async-trait = "0.1.81"
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json"] }
tracing-appender = "0.2.3"
prometheus = "0.13"
lazy_static = "1.4"
warp = "0.3"
dashmap = "6.1.0"
scopeguard = "1.2.0"
light-client = { workspace = true }
[dev-dependencies]
function_name = "0.3.0"
serial_test = "3.2.0"
rstest = "0.23.0"
light-prover-client = {path = "../circuit-lib/light-prover-client" }
light-merkle-tree-reference = {path = "../merkle-tree/reference"}
light-hasher = {path = "../merkle-tree/hasher"}
light-bounded-vec = {path = "../merkle-tree/bounded-vec"}
light-test-utils = {path = "../test-utils" }
light-program-test = { workspace = true, features = ["devenv"] }
num-bigint = "0.4"
num-traits = "0.2"
rand = "0.8.5"
once_cell = "1.19.0"
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/forester/Dockerfile
|
FROM rust:1.79 AS builder
WORKDIR /app
RUN apt-get update && apt-get install -y clang lld
COPY .. .
RUN cargo build --release --package forester
FROM debian:12-slim
RUN apt-get update && apt-get install -y ca-certificates libssl-dev && rm -rf /var/lib/apt/lists/*
RUN mkdir -p /app/config
COPY --from=builder /app/target/release/forester /usr/local/bin/forester
WORKDIR /app
ENTRYPOINT ["/usr/local/bin/forester"]
CMD []
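# Example (illustrative; assumes the repository root is used as the build context):
#   docker build -f forester/Dockerfile -t forester .
#   docker run --rm forester --help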
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/forester/Cargo.lock
|
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "Inflector"
version = "0.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
dependencies = [
"lazy_static",
"regex",
]
[[package]]
name = "account-compression"
version = "0.3.1"
dependencies = [
"aligned-sized",
"anchor-lang",
"ark-ff",
"ark-serialize",
"borsh 0.10.3",
"bytemuck",
"light-bounded-vec",
"light-concurrent-merkle-tree",
"light-hash-set",
"light-hasher",
"light-heap",
"light-indexed-merkle-tree",
"light-macros",
"light-utils",
"num-bigint 0.4.4",
"num-traits",
"solana-sdk",
]
[[package]]
name = "addr2line"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
dependencies = [
"gimli",
]
[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "aead"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b613b8e1e3cf911a086f53f03bf286f52fd7a7258e4fa606f0ef220d39d8877"
dependencies = [
"generic-array",
]
[[package]]
name = "aes"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8"
dependencies = [
"cfg-if",
"cipher",
"cpufeatures",
"opaque-debug",
]
[[package]]
name = "aes-gcm-siv"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589c637f0e68c877bbd59a4599bbe849cac8e5f3e4b5a3ebae8f528cd218dcdc"
dependencies = [
"aead",
"aes",
"cipher",
"ctr",
"polyval",
"subtle",
"zeroize",
]
[[package]]
name = "ahash"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9"
dependencies = [
"getrandom 0.2.14",
"once_cell",
"version_check",
]
[[package]]
name = "ahash"
version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011"
dependencies = [
"cfg-if",
"getrandom 0.2.14",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
name = "aho-corasick"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
dependencies = [
"memchr",
]
[[package]]
name = "aligned-sized"
version = "0.1.0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "alloc-no-stdlib"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3"
[[package]]
name = "alloc-stdlib"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece"
dependencies = [
"alloc-no-stdlib",
]
[[package]]
name = "anchor-attribute-access-control"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5f619f1d04f53621925ba8a2e633ba5a6081f2ae14758cbb67f38fd823e0a3e"
dependencies = [
"anchor-syn",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "anchor-attribute-account"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7f2a3e1df4685f18d12a943a9f2a7456305401af21a07c9fe076ef9ecd6e400"
dependencies = [
"anchor-syn",
"bs58 0.5.1",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "anchor-attribute-constant"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9423945cb55627f0b30903288e78baf6f62c6c8ab28fb344b6b25f1ffee3dca7"
dependencies = [
"anchor-syn",
"quote",
"syn 1.0.109",
]
[[package]]
name = "anchor-attribute-error"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93ed12720033cc3c3bf3cfa293349c2275cd5ab99936e33dd4bf283aaad3e241"
dependencies = [
"anchor-syn",
"quote",
"syn 1.0.109",
]
[[package]]
name = "anchor-attribute-event"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eef4dc0371eba2d8c8b54794b0b0eb786a234a559b77593d6f80825b6d2c77a2"
dependencies = [
"anchor-syn",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "anchor-attribute-program"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b18c4f191331e078d4a6a080954d1576241c29c56638783322a18d308ab27e4f"
dependencies = [
"anchor-syn",
"quote",
"syn 1.0.109",
]
[[package]]
name = "anchor-derive-accounts"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5de10d6e9620d3bcea56c56151cad83c5992f50d5960b3a9bebc4a50390ddc3c"
dependencies = [
"anchor-syn",
"quote",
"syn 1.0.109",
]
[[package]]
name = "anchor-derive-serde"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4e2e5be518ec6053d90a2a7f26843dbee607583c779e6c8395951b9739bdfbe"
dependencies = [
"anchor-syn",
"borsh-derive-internal 0.10.3",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "anchor-derive-space"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ecc31d19fa54840e74b7a979d44bcea49d70459de846088a1d71e87ba53c419"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "anchor-lang"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35da4785497388af0553586d55ebdc08054a8b1724720ef2749d313494f2b8ad"
dependencies = [
"anchor-attribute-access-control",
"anchor-attribute-account",
"anchor-attribute-constant",
"anchor-attribute-error",
"anchor-attribute-event",
"anchor-attribute-program",
"anchor-derive-accounts",
"anchor-derive-serde",
"anchor-derive-space",
"arrayref",
"base64 0.13.1",
"bincode",
"borsh 0.10.3",
"bytemuck",
"getrandom 0.2.14",
"solana-program",
"thiserror",
]
[[package]]
name = "anchor-syn"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9101b84702fed2ea57bd22992f75065da5648017135b844283a2f6d74f27825"
dependencies = [
"anyhow",
"bs58 0.5.1",
"heck 0.3.3",
"proc-macro2",
"quote",
"serde",
"serde_json",
"sha2 0.10.8",
"syn 1.0.109",
"thiserror",
]
[[package]]
name = "android-tzdata"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
[[package]]
name = "android_system_properties"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
dependencies = [
"libc",
]
[[package]]
name = "ansi_term"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
dependencies = [
"winapi",
]
[[package]]
name = "anstream"
version = "0.6.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"utf8parse",
]
[[package]]
name = "anstyle"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
[[package]]
name = "anstyle-parse"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648"
dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7"
dependencies = [
"anstyle",
"windows-sys 0.52.0",
]
[[package]]
name = "anyhow"
version = "1.0.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247"
[[package]]
name = "ark-bn254"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a22f4561524cd949590d78d7d4c5df8f592430d221f7f3c9497bbafd8972120f"
dependencies = [
"ark-ec",
"ark-ff",
"ark-std",
]
[[package]]
name = "ark-ec"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "defd9a439d56ac24968cca0571f598a61bc8c55f71d50a89cda591cb750670ba"
dependencies = [
"ark-ff",
"ark-poly",
"ark-serialize",
"ark-std",
"derivative",
"hashbrown 0.13.2",
"itertools",
"num-traits",
"zeroize",
]
[[package]]
name = "ark-ff"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba"
dependencies = [
"ark-ff-asm",
"ark-ff-macros",
"ark-serialize",
"ark-std",
"derivative",
"digest 0.10.7",
"itertools",
"num-bigint 0.4.4",
"num-traits",
"paste",
"rustc_version",
"zeroize",
]
[[package]]
name = "ark-ff-asm"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348"
dependencies = [
"quote",
"syn 1.0.109",
]
[[package]]
name = "ark-ff-macros"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565"
dependencies = [
"num-bigint 0.4.4",
"num-traits",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "ark-poly"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d320bfc44ee185d899ccbadfa8bc31aab923ce1558716e1997a1e74057fe86bf"
dependencies = [
"ark-ff",
"ark-serialize",
"ark-std",
"derivative",
"hashbrown 0.13.2",
]
[[package]]
name = "ark-serialize"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5"
dependencies = [
"ark-serialize-derive",
"ark-std",
"digest 0.10.7",
"num-bigint 0.4.4",
]
[[package]]
name = "ark-serialize-derive"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "ark-std"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185"
dependencies = [
"num-traits",
"rand 0.8.5",
]
[[package]]
name = "arrayref"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545"
[[package]]
name = "arrayvec"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
[[package]]
name = "ascii"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eab1c04a571841102f5345a8fc0f6bb3d31c315dec879b5c6e42e40ce7ffa34e"
[[package]]
name = "asn1-rs"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f6fd5ddaf0351dff5b8da21b2fb4ff8e08ddd02857f0bf69c47639106c0fff0"
dependencies = [
"asn1-rs-derive",
"asn1-rs-impl",
"displaydoc",
"nom",
"num-traits",
"rusticata-macros",
"thiserror",
"time",
]
[[package]]
name = "asn1-rs-derive"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "726535892e8eae7e70657b4c8ea93d26b8553afb1ce617caee529ef96d7dee6c"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"synstructure",
]
[[package]]
name = "asn1-rs-impl"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "assert_matches"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9"
[[package]]
name = "async-channel"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35"
dependencies = [
"concurrent-queue",
"event-listener",
"futures-core",
]
[[package]]
name = "async-compression"
version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07dbbf24db18d609b1462965249abdf49129ccad073ec257da372adc83259c60"
dependencies = [
"brotli",
"flate2",
"futures-core",
"memchr",
"pin-project-lite",
"tokio",
]
[[package]]
name = "async-mutex"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "479db852db25d9dbf6204e6cb6253698f175c15726470f78af0d918e99d6156e"
dependencies = [
"event-listener",
]
[[package]]
name = "async-trait"
version = "0.1.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi 0.1.19",
"libc",
"winapi",
]
[[package]]
name = "autocfg"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80"
[[package]]
name = "backtrace"
version = "0.3.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d"
dependencies = [
"addr2line",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
]
[[package]]
name = "base64"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff"
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "base64ct"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
[[package]]
name = "bincode"
version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
dependencies = [
"serde",
]
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
dependencies = [
"serde",
]
[[package]]
name = "bitmaps"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
dependencies = [
"typenum",
]
[[package]]
name = "blake3"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30cca6d3674597c30ddf2c587bf8d9d65c9a84d2326d941cc79c9842dfe0ef52"
dependencies = [
"arrayref",
"arrayvec",
"cc",
"cfg-if",
"constant_time_eq",
"digest 0.10.7",
]
[[package]]
name = "block-buffer"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
dependencies = [
"block-padding",
"generic-array",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
]
[[package]]
name = "block-padding"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae"
[[package]]
name = "borsh"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15bf3650200d8bffa99015595e10f1fbd17de07abbc25bb067da79e769939bfa"
dependencies = [
"borsh-derive 0.9.3",
"hashbrown 0.11.2",
]
[[package]]
name = "borsh"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b"
dependencies = [
"borsh-derive 0.10.3",
"hashbrown 0.13.2",
]
[[package]]
name = "borsh"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0901fc8eb0aca4c83be0106d6f2db17d86a08dfc2c25f0e84464bf381158add6"
dependencies = [
"borsh-derive 1.4.0",
"cfg_aliases",
]
[[package]]
name = "borsh-derive"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6441c552f230375d18e3cc377677914d2ca2b0d36e52129fe15450a2dce46775"
dependencies = [
"borsh-derive-internal 0.9.3",
"borsh-schema-derive-internal 0.9.3",
"proc-macro-crate 0.1.5",
"proc-macro2",
"syn 1.0.109",
]
[[package]]
name = "borsh-derive"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7"
dependencies = [
"borsh-derive-internal 0.10.3",
"borsh-schema-derive-internal 0.10.3",
"proc-macro-crate 0.1.5",
"proc-macro2",
"syn 1.0.109",
]
[[package]]
name = "borsh-derive"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51670c3aa053938b0ee3bd67c3817e471e626151131b934038e83c5bf8de48f5"
dependencies = [
"once_cell",
"proc-macro-crate 3.1.0",
"proc-macro2",
"quote",
"syn 2.0.58",
"syn_derive",
]
[[package]]
name = "borsh-derive-internal"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5449c28a7b352f2d1e592a8a28bf139bc71afb0764a14f3c02500935d8c44065"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "borsh-derive-internal"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "borsh-schema-derive-internal"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdbd5696d8bfa21d53d9fe39a714a18538bad11492a42d066dbbc395fb1951c0"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "borsh-schema-derive-internal"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "brotli"
version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "125740193d7fee5cc63ab9e16c2fdc4e07c74ba755cc53b327d6ea029e9fc569"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
"brotli-decompressor",
]
[[package]]
name = "brotli-decompressor"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65622a320492e09b5e0ac436b14c54ff68199bac392d0e89a6832c4518eea525"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
]
[[package]]
name = "bs58"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3"
[[package]]
name = "bs58"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4"
dependencies = [
"tinyvec",
]
[[package]]
name = "bumpalo"
version = "3.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "bv"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8834bb1d8ee5dc048ee3124f2c7c1afcc6bc9aed03f11e9dfd8c69470a5db340"
dependencies = [
"feature-probe",
"serde",
]
[[package]]
name = "bytemuck"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15"
dependencies = [
"bytemuck_derive",
]
[[package]]
name = "bytemuck_derive"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4da9a32f3fed317401fa3c862968128267c3106685286e15d5aaa3d7389c2f60"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bytes"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9"
[[package]]
name = "caps"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "190baaad529bcfbde9e1a19022c42781bdb6ff9de25721abdb8fd98c0807730b"
dependencies = [
"libc",
"thiserror",
]
[[package]]
name = "cc"
version = "1.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2678b2e3449475e95b0aa6f9b506a28e61b3dc8996592b983695e8ebb58a8b41"
dependencies = [
"jobserver",
"libc",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "cfg_aliases"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
[[package]]
name = "chrono"
version = "0.4.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a0d04d43504c61aa6c7531f1871dd0d418d91130162063b789da00fd7057a5e"
dependencies = [
"android-tzdata",
"iana-time-zone",
"js-sys",
"num-traits",
"serde",
"wasm-bindgen",
"windows-targets 0.52.4",
]
[[package]]
name = "cipher"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7"
dependencies = [
"generic-array",
]
[[package]]
name = "clap"
version = "2.34.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
dependencies = [
"ansi_term",
"atty",
"bitflags 1.3.2",
"strsim 0.8.0",
"textwrap 0.11.0",
"unicode-width",
"vec_map",
]
[[package]]
name = "clap"
version = "3.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123"
dependencies = [
"atty",
"bitflags 1.3.2",
"clap_lex 0.2.4",
"indexmap 1.9.3",
"once_cell",
"strsim 0.10.0",
"termcolor",
"textwrap 0.16.1",
]
[[package]]
name = "clap"
version = "4.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0"
dependencies = [
"clap_builder",
"clap_derive",
]
[[package]]
name = "clap_builder"
version = "4.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4"
dependencies = [
"anstream",
"anstyle",
"clap_lex 0.7.0",
"strsim 0.11.1",
]
[[package]]
name = "clap_derive"
version = "4.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64"
dependencies = [
"heck 0.5.0",
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "clap_lex"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
dependencies = [
"os_str_bytes",
]
[[package]]
name = "clap_lex"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce"
[[package]]
name = "colorchoice"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]]
name = "combine"
version = "3.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da3da6baa321ec19e1cc41d31bf599f00c783d0517095cdaf0332e3fe8d20680"
dependencies = [
"ascii",
"byteorder",
"either",
"memchr",
"unreachable",
]
[[package]]
name = "concurrent-queue"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "config"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7328b20597b53c2454f0b1919720c25c7339051c02b72b7e05409e00b14132be"
dependencies = [
"async-trait",
"convert_case",
"json5",
"lazy_static",
"nom",
"pathdiff",
"ron",
"rust-ini",
"serde",
"serde_json",
"toml 0.8.12",
"yaml-rust",
]
[[package]]
name = "console"
version = "0.15.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb"
dependencies = [
"encode_unicode",
"lazy_static",
"libc",
"unicode-width",
"windows-sys 0.52.0",
]
[[package]]
name = "console_error_panic_hook"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc"
dependencies = [
"cfg-if",
"wasm-bindgen",
]
[[package]]
name = "console_log"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e89f72f65e8501878b8a004d5a1afb780987e2ce2b4532c562e367a72c57499f"
dependencies = [
"log",
"web-sys",
]
[[package]]
name = "const-oid"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4c78c047431fee22c1a7bb92e00ad095a02a983affe4d8a72e2a2c62c1b94f3"
[[package]]
name = "const-random"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359"
dependencies = [
"const-random-macro",
]
[[package]]
name = "const-random-macro"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e"
dependencies = [
"getrandom 0.2.14",
"once_cell",
"tiny-keccak",
]
[[package]]
name = "constant_time_eq"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2"
[[package]]
name = "convert_case"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "core-foundation"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]]
name = "core-foundation-sys"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f"
[[package]]
name = "cpufeatures"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
dependencies = [
"libc",
]
[[package]]
name = "crank"
version = "0.1.0"
dependencies = [
"account-compression",
"anchor-lang",
"chrono",
"clap 4.5.4",
"config",
"function_name",
"futures",
"light-compressed-pda",
"light-hash-set",
"reqwest",
"rstest",
"serde",
"serde_json",
"serial_test",
"solana-client",
"solana-sdk",
"thiserror",
"time",
"tiny-bip39",
"tokio",
]
[[package]]
name = "crc32fast"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa"
dependencies = [
"cfg-if",
]
[[package]]
name = "crossbeam-channel"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
dependencies = [
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345"
[[package]]
name = "crunchy"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"typenum",
]
[[package]]
name = "crypto-mac"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab"
dependencies = [
"generic-array",
"subtle",
]
[[package]]
name = "ctr"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "049bb91fb4aaf0e3c7efa6cd5ef877dbbbd15b39dad06d9948de4ec8a75761ea"
dependencies = [
"cipher",
]
[[package]]
name = "curve25519-dalek"
version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90f9d052967f590a76e62eb387bd0bbb1b000182c3cefe5364db6b7211651bc0"
dependencies = [
"byteorder",
"digest 0.9.0",
"rand_core 0.5.1",
"serde",
"subtle",
"zeroize",
]
[[package]]
name = "darling"
version = "0.20.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391"
dependencies = [
"darling_core",
"darling_macro",
]
[[package]]
name = "darling_core"
version = "0.20.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f"
dependencies = [
"fnv",
"ident_case",
"proc-macro2",
"quote",
"strsim 0.10.0",
"syn 2.0.58",
]
[[package]]
name = "darling_macro"
version = "0.20.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f"
dependencies = [
"darling_core",
"quote",
"syn 2.0.58",
]
[[package]]
name = "dashmap"
version = "5.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
dependencies = [
"cfg-if",
"hashbrown 0.14.3",
"lock_api",
"once_cell",
"parking_lot_core",
]
[[package]]
name = "data-encoding"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5"
[[package]]
name = "der"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6919815d73839e7ad218de758883aae3a257ba6759ce7a9992501efbb53d705c"
dependencies = [
"const-oid",
]
[[package]]
name = "der-parser"
version = "8.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbd676fbbab537128ef0278adb5576cf363cff6aa22a7b24effe97347cfab61e"
dependencies = [
"asn1-rs",
"displaydoc",
"nom",
"num-bigint 0.4.4",
"num-traits",
"rusticata-macros",
]
[[package]]
name = "deranged"
version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4"
dependencies = [
"powerfmt",
]
[[package]]
name = "derivation-path"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e5c37193a1db1d8ed868c03ec7b152175f26160a5b740e5e484143877e0adf0"
[[package]]
name = "derivative"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "dialoguer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59c6f2989294b9a498d3ad5491a79c6deb604617378e1cdc4bfc1c1361fe2f87"
dependencies = [
"console",
"shell-words",
"tempfile",
"zeroize",
]
[[package]]
name = "digest"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
dependencies = [
"generic-array",
]
[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer 0.10.4",
"crypto-common",
"subtle",
]
[[package]]
name = "displaydoc"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "dlopen2"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09b4f5f101177ff01b8ec4ecc81eead416a8aa42819a2869311b3420fa114ffa"
dependencies = [
"dlopen2_derive",
"libc",
"once_cell",
"winapi",
]
[[package]]
name = "dlopen2_derive"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6cbae11b3de8fce2a456e8ea3dada226b35fe791f0dc1d360c0941f0bb681f3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "dlv-list"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f"
dependencies = [
"const-random",
]
[[package]]
name = "eager"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abe71d579d1812060163dff96056261deb5bf6729b100fa2e36a68b9649ba3d3"
[[package]]
name = "ed25519"
version = "1.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7"
dependencies = [
"signature",
]
[[package]]
name = "ed25519-dalek"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d"
dependencies = [
"curve25519-dalek",
"ed25519",
"rand 0.7.3",
"serde",
"sha2 0.9.9",
"zeroize",
]
[[package]]
name = "ed25519-dalek-bip32"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d2be62a4061b872c8c0873ee4fc6f101ce7b889d039f019c5fa2af471a59908"
dependencies = [
"derivation-path",
"ed25519-dalek",
"hmac 0.12.1",
"sha2 0.10.8",
]
[[package]]
name = "either"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a"
[[package]]
name = "encode_unicode"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
[[package]]
name = "encoding_rs"
version = "0.8.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1"
dependencies = [
"cfg-if",
]
[[package]]
name = "enum-iterator"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fd242f399be1da0a5354aa462d57b4ab2b4ee0683cc552f7c007d2d12d36e94"
dependencies = [
"enum-iterator-derive",
]
[[package]]
name = "enum-iterator-derive"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03cdc46ec28bd728e67540c528013c6a10eb69a02eb31078a1bda695438cbfb8"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "env_logger"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7"
dependencies = [
"atty",
"humantime",
"log",
"regex",
"termcolor",
]
[[package]]
name = "equivalent"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "errno"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
dependencies = [
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "event-listener"
version = "2.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
[[package]]
name = "fastrand"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984"
[[package]]
name = "feature-probe"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "835a3dc7d1ec9e75e2b5fb4ba75396837112d2060b03f7d43bc1897c7f7211da"
[[package]]
name = "flate2"
version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e"
dependencies = [
"crc32fast",
"miniz_oxide",
]
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foreign-types"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
dependencies = [
"foreign-types-shared",
]
[[package]]
name = "foreign-types-shared"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
dependencies = [
"percent-encoding",
]
[[package]]
name = "function_name"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1ab577a896d09940b5fe12ec5ae71f9d8211fff62c919c03a3750a9901e98a7"
dependencies = [
"function_name-proc-macro",
]
[[package]]
name = "function_name-proc-macro"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "673464e1e314dd67a0fd9544abc99e8eb28d0c7e3b69b033bcff9b2d00b87333"
[[package]]
name = "futures"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0"
dependencies = [
"futures-channel",
"futures-core",
"futures-executor",
"futures-io",
"futures-sink",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-channel"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78"
dependencies = [
"futures-core",
"futures-sink",
]
[[package]]
name = "futures-core"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"
[[package]]
name = "futures-executor"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d"
dependencies = [
"futures-core",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-io"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"
[[package]]
name = "futures-macro"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "futures-sink"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5"
[[package]]
name = "futures-task"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004"
[[package]]
name = "futures-timer"
version = "3.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24"
[[package]]
name = "futures-util"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
dependencies = [
"futures-channel",
"futures-core",
"futures-io",
"futures-macro",
"futures-sink",
"futures-task",
"memchr",
"pin-project-lite",
"pin-utils",
"slab",
]
[[package]]
name = "generic-array"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"serde",
"typenum",
"version_check",
]
[[package]]
name = "gethostname"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1ebd34e35c46e00bb73e81363248d627782724609fe1b6396f553f68fe3862e"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "getrandom"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
"cfg-if",
"js-sys",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
"wasm-bindgen",
]
[[package]]
name = "getrandom"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
dependencies = [
"cfg-if",
"js-sys",
"libc",
"wasi 0.11.0+wasi-snapshot-preview1",
"wasm-bindgen",
]
[[package]]
name = "gimli"
version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
[[package]]
name = "glob"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "goblin"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7666983ed0dd8d21a6f6576ee00053ca0926fb281a5522577a4dbd0f1b54143"
dependencies = [
"log",
"plain",
"scroll",
]
[[package]]
name = "groth16-solana"
version = "0.0.2"
source = "git+https://github.com/Lightprotocol/groth16-solana.git?branch=master#d3e474d11d27f6ba2b0c8c1917a9170b200e3b24"
dependencies = [
"ark-bn254",
"ark-ec",
"ark-ff",
"ark-serialize",
"solana-program",
"thiserror",
]
[[package]]
name = "h2"
version = "0.3.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8"
dependencies = [
"bytes",
"fnv",
"futures-core",
"futures-sink",
"futures-util",
"http",
"indexmap 2.2.6",
"slab",
"tokio",
"tokio-util",
"tracing",
]
[[package]]
name = "hash32"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67"
dependencies = [
"byteorder",
]
[[package]]
name = "hashbrown"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
dependencies = [
"ahash 0.7.8",
]
[[package]]
name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e"
dependencies = [
"ahash 0.8.11",
]
[[package]]
name = "hashbrown"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
[[package]]
name = "heck"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hermit-abi"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]
[[package]]
name = "hermit-abi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
[[package]]
name = "histogram"
version = "0.6.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12cb882ccb290b8646e554b157ab0b71e64e8d5bef775cd66b6531e52d302669"
[[package]]
name = "hmac"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840"
dependencies = [
"crypto-mac",
"digest 0.9.0",
]
[[package]]
name = "hmac"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
dependencies = [
"digest 0.10.7",
]
[[package]]
name = "hmac-drbg"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1"
dependencies = [
"digest 0.9.0",
"generic-array",
"hmac 0.8.1",
]
[[package]]
name = "http"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]]
name = "http-body"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
dependencies = [
"bytes",
"http",
"pin-project-lite",
]
[[package]]
name = "httparse"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904"
[[package]]
name = "httpdate"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "humantime"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hyper"
version = "0.14.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80"
dependencies = [
"bytes",
"futures-channel",
"futures-core",
"futures-util",
"h2",
"http",
"http-body",
"httparse",
"httpdate",
"itoa",
"pin-project-lite",
"socket2",
"tokio",
"tower-service",
"tracing",
"want",
]
[[package]]
name = "hyper-rustls"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
dependencies = [
"futures-util",
"http",
"hyper",
"rustls",
"tokio",
"tokio-rustls",
]
[[package]]
name = "hyper-tls"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905"
dependencies = [
"bytes",
"hyper",
"native-tls",
"tokio",
"tokio-native-tls",
]
[[package]]
name = "iana-time-zone"
version = "0.1.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141"
dependencies = [
"android_system_properties",
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
"wasm-bindgen",
"windows-core",
]
[[package]]
name = "iana-time-zone-haiku"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
dependencies = [
"cc",
]
[[package]]
name = "ident_case"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "idna"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
dependencies = [
"unicode-bidi",
"unicode-normalization",
]
[[package]]
name = "im"
version = "15.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9"
dependencies = [
"bitmaps",
"rand_core 0.6.4",
"rand_xoshiro",
"rayon",
"serde",
"sized-chunks",
"typenum",
"version_check",
]
[[package]]
name = "indexmap"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown 0.12.3",
]
[[package]]
name = "indexmap"
version = "2.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
dependencies = [
"equivalent",
"hashbrown 0.14.3",
]
[[package]]
name = "indicatif"
version = "0.17.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3"
dependencies = [
"console",
"instant",
"number_prefix",
"portable-atomic",
"unicode-width",
]
[[package]]
name = "instant"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
"cfg-if",
]
[[package]]
name = "ipnet"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3"
[[package]]
name = "itertools"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
[[package]]
name = "jobserver"
version = "0.1.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab46a6e9526ddef3ae7f787c06f0f2600639ba80ea3eade3d8e670a2230f51d6"
dependencies = [
"libc",
]
[[package]]
name = "js-sys"
version = "0.3.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d"
dependencies = [
"wasm-bindgen",
]
[[package]]
name = "json5"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1"
dependencies = [
"pest",
"pest_derive",
"serde",
]
[[package]]
name = "jsonrpc-core"
version = "18.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb"
dependencies = [
"futures",
"futures-executor",
"futures-util",
"log",
"serde",
"serde_derive",
"serde_json",
]
[[package]]
name = "keccak"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654"
dependencies = [
"cpufeatures",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.153"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
[[package]]
name = "libsecp256k1"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9d220bc1feda2ac231cb78c3d26f27676b8cf82c96971f7aeef3d0cf2797c73"
dependencies = [
"arrayref",
"base64 0.12.3",
"digest 0.9.0",
"hmac-drbg",
"libsecp256k1-core",
"libsecp256k1-gen-ecmult",
"libsecp256k1-gen-genmult",
"rand 0.7.3",
"serde",
"sha2 0.9.9",
"typenum",
]
[[package]]
name = "libsecp256k1-core"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0f6ab710cec28cef759c5f18671a27dae2a5f952cdaaee1d8e2908cb2478a80"
dependencies = [
"crunchy",
"digest 0.9.0",
"subtle",
]
[[package]]
name = "libsecp256k1-gen-ecmult"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccab96b584d38fac86a83f07e659f0deafd0253dc096dab5a36d53efe653c5c3"
dependencies = [
"libsecp256k1-core",
]
[[package]]
name = "libsecp256k1-gen-genmult"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67abfe149395e3aa1c48a2beb32b068e2334402df8181f818d3aee2b304c4f5d"
dependencies = [
"libsecp256k1-core",
]
[[package]]
name = "light-bounded-vec"
version = "0.1.0"
dependencies = [
"bytemuck",
"solana-program",
"thiserror",
]
[[package]]
name = "light-compressed-pda"
version = "0.3.0"
dependencies = [
"account-compression",
"aligned-sized",
"anchor-lang",
"bytemuck",
"groth16-solana",
"light-concurrent-merkle-tree",
"light-hasher",
"light-heap",
"light-macros",
"light-utils",
"solana-sdk",
]
[[package]]
name = "light-concurrent-merkle-tree"
version = "0.1.0"
dependencies = [
"borsh 0.10.3",
"bytemuck",
"light-bounded-vec",
"light-hasher",
"memoffset 0.8.0",
"solana-program",
"thiserror",
]
[[package]]
name = "light-hash-set"
version = "0.1.0"
dependencies = [
"light-bounded-vec",
"light-utils",
"memoffset 0.9.1",
"num-bigint 0.4.4",
"num-traits",
"solana-program",
"thiserror",
]
[[package]]
name = "light-hasher"
version = "0.1.0"
dependencies = [
"ark-bn254",
"light-poseidon",
"sha2 0.10.8",
"sha3 0.10.8",
"solana-program",
"thiserror",
]
[[package]]
name = "light-heap"
version = "0.1.0"
dependencies = [
"anchor-lang",
]
[[package]]
name = "light-indexed-merkle-tree"
version = "0.1.0"
dependencies = [
"ark-ff",
"borsh 0.10.3",
"light-bounded-vec",
"light-concurrent-merkle-tree",
"light-hasher",
"light-merkle-tree-reference",
"light-utils",
"num-bigint 0.4.4",
"num-traits",
"solana-program",
"thiserror",
]
[[package]]
name = "light-macros"
version = "0.3.1"
dependencies = [
"bs58 0.4.0",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "light-merkle-tree-reference"
version = "0.1.0"
dependencies = [
"light-bounded-vec",
"light-hasher",
"thiserror",
]
[[package]]
name = "light-poseidon"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c9a85a9752c549ceb7578064b4ed891179d20acd85f27318573b64d2d7ee7ee"
dependencies = [
"ark-bn254",
"ark-ff",
"num-bigint 0.4.4",
"thiserror",
]
[[package]]
name = "light-utils"
version = "0.1.0"
dependencies = [
"anyhow",
"ark-bn254",
"ark-ff",
"num-bigint 0.4.4",
"solana-program",
"thiserror",
]
[[package]]
name = "linked-hash-map"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "linux-raw-sys"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
[[package]]
name = "lock_api"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45"
dependencies = [
"autocfg",
"scopeguard",
]
[[package]]
name = "log"
version = "0.4.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
[[package]]
name = "memchr"
version = "2.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
[[package]]
name = "memmap2"
version = "0.5.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327"
dependencies = [
"libc",
]
[[package]]
name = "memoffset"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
name = "memoffset"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1"
dependencies = [
"autocfg",
]
[[package]]
name = "memoffset"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a"
dependencies = [
"autocfg",
]
[[package]]
name = "merlin"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58c38e2799fc0978b65dfff8023ec7843e2330bb462f19198840b34b6582397d"
dependencies = [
"byteorder",
"keccak",
"rand_core 0.6.4",
"zeroize",
]
[[package]]
name = "mime"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniz_oxide"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7"
dependencies = [
"adler",
]
[[package]]
name = "mio"
version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
dependencies = [
"libc",
"wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys 0.48.0",
]
[[package]]
name = "native-tls"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e"
dependencies = [
"lazy_static",
"libc",
"log",
"openssl",
"openssl-probe",
"openssl-sys",
"schannel",
"security-framework",
"security-framework-sys",
"tempfile",
]
[[package]]
name = "nix"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b"
dependencies = [
"bitflags 1.3.2",
"cfg-if",
"libc",
"memoffset 0.7.1",
"pin-utils",
]
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]]
name = "num"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8536030f9fea7127f841b45bb6243b27255787fb4eb83958aa1ef9d2fdc0c36"
dependencies = [
"num-bigint 0.2.6",
"num-complex",
"num-integer",
"num-iter",
"num-rational",
"num-traits",
]
[[package]]
name = "num-bigint"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-bigint"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
"rand 0.8.5",
]
[[package]]
name = "num-complex"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6b19411a9719e753aff12e5187b74d60d3dc449ec3f4dc21e3989c3f554bc95"
dependencies = [
"autocfg",
"num-traits",
]
[[package]]
name = "num-conv"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]]
name = "num-derive"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "num-derive"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "num-integer"
version = "0.1.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
dependencies = [
"num-traits",
]
[[package]]
name = "num-iter"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d869c01cc0c455284163fd0092f1f93835385ccab5a98a0dcc497b2f8bf055a9"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c000134b5dbf44adc5cb772486d335293351644b801551abe8f75c84cfa4aef"
dependencies = [
"autocfg",
"num-bigint 0.2.6",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a"
dependencies = [
"autocfg",
]
[[package]]
name = "num_cpus"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
dependencies = [
"hermit-abi 0.3.9",
"libc",
]
[[package]]
name = "num_enum"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1"
dependencies = [
"num_enum_derive 0.6.1",
]
[[package]]
name = "num_enum"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845"
dependencies = [
"num_enum_derive 0.7.2",
]
[[package]]
name = "num_enum_derive"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6"
dependencies = [
"proc-macro-crate 1.3.1",
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "num_enum_derive"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b"
dependencies = [
"proc-macro-crate 3.1.0",
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "number_prefix"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
[[package]]
name = "object"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"
dependencies = [
"memchr",
]
[[package]]
name = "oid-registry"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bedf36ffb6ba96c2eb7144ef6270557b52e54b20c0a8e1eb2ff99a6c6959bff"
dependencies = [
"asn1-rs",
]
[[package]]
name = "once_cell"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "opaque-debug"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381"
[[package]]
name = "openssl"
version = "0.10.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f"
dependencies = [
"bitflags 2.5.0",
"cfg-if",
"foreign-types",
"libc",
"once_cell",
"openssl-macros",
"openssl-sys",
]
[[package]]
name = "openssl-macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "openssl-probe"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
version = "0.9.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2"
dependencies = [
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "ordered-multimap"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ed8acf08e98e744e5384c8bc63ceb0364e68a6854187221c18df61c4797690e"
dependencies = [
"dlv-list",
"hashbrown 0.13.2",
]
[[package]]
name = "os_str_bytes"
version = "6.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1"
[[package]]
name = "parking_lot"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
dependencies = [
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"smallvec",
"windows-targets 0.48.5",
]
[[package]]
name = "paste"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
[[package]]
name = "pathdiff"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd"
[[package]]
name = "pbkdf2"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "216eaa586a190f0a738f2f918511eecfa90f13295abec0e457cdebcceda80cbd"
dependencies = [
"crypto-mac",
]
[[package]]
name = "pbkdf2"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917"
dependencies = [
"digest 0.10.7",
]
[[package]]
name = "pem"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8"
dependencies = [
"base64 0.13.1",
]
[[package]]
name = "percent-encoding"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "percentage"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fd23b938276f14057220b707937bcb42fa76dda7560e57a2da30cb52d557937"
dependencies = [
"num",
]
[[package]]
name = "pest"
version = "2.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "311fb059dee1a7b802f036316d790138c613a4e8b180c822e3925a662e9f0c95"
dependencies = [
"memchr",
"thiserror",
"ucd-trie",
]
[[package]]
name = "pest_derive"
version = "2.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f73541b156d32197eecda1a4014d7f868fd2bcb3c550d5386087cfba442bf69c"
dependencies = [
"pest",
"pest_generator",
]
[[package]]
name = "pest_generator"
version = "2.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c35eeed0a3fab112f75165fdc026b3913f4183133f19b49be773ac9ea966e8bd"
dependencies = [
"pest",
"pest_meta",
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "pest_meta"
version = "2.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2adbf29bb9776f28caece835398781ab24435585fe0d4dc1374a61db5accedca"
dependencies = [
"once_cell",
"pest",
"sha2 0.10.8",
]
[[package]]
name = "pin-project-lite"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02"
[[package]]
name = "pin-utils"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "pkcs8"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cabda3fb821068a9a4fab19a683eac3af12edf0f34b94a8be53c4972b8149d0"
dependencies = [
"der",
"spki",
"zeroize",
]
[[package]]
name = "pkg-config"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec"
[[package]]
name = "plain"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
[[package]]
name = "polyval"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8419d2b623c7c0896ff2d5d96e2cb4ede590fed28fcc34934f4c33c036e620a1"
dependencies = [
"cfg-if",
"cpufeatures",
"opaque-debug",
"universal-hash",
]
[[package]]
name = "portable-atomic"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
[[package]]
name = "powerfmt"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
[[package]]
name = "ppv-lite86"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "proc-macro-crate"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785"
dependencies = [
"toml 0.5.11",
]
[[package]]
name = "proc-macro-crate"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919"
dependencies = [
"once_cell",
"toml_edit 0.19.15",
]
[[package]]
name = "proc-macro-crate"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284"
dependencies = [
"toml_edit 0.21.1",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
]
[[package]]
name = "proc-macro2"
version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e"
dependencies = [
"unicode-ident",
]
[[package]]
name = "qstring"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d464fae65fff2680baf48019211ce37aaec0c78e9264c84a3e484717f965104e"
dependencies = [
"percent-encoding",
]
[[package]]
name = "qualifier_attr"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e2e25ee72f5b24d773cae88422baddefff7714f97aab68d96fe2b6fc4a28fb2"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "quinn"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8cc2c5017e4b43d5995dcea317bc46c1e09404c0a9664d2908f7f02dfe943d75"
dependencies = [
"bytes",
"pin-project-lite",
"quinn-proto",
"quinn-udp",
"rustc-hash",
"rustls",
"thiserror",
"tokio",
"tracing",
]
[[package]]
name = "quinn-proto"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "141bf7dfde2fbc246bfd3fe12f2455aa24b0fbd9af535d8c86c7bd1381ff2b1a"
dependencies = [
"bytes",
"rand 0.8.5",
"ring 0.16.20",
"rustc-hash",
"rustls",
"rustls-native-certs",
"slab",
"thiserror",
"tinyvec",
"tracing",
]
[[package]]
name = "quinn-udp"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "055b4e778e8feb9f93c4e439f71dc2156ef13360b432b799e179a8c4cdf0b1d7"
dependencies = [
"bytes",
"libc",
"socket2",
"tracing",
"windows-sys 0.48.0",
]
[[package]]
name = "quote"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rand"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
"getrandom 0.1.16",
"libc",
"rand_chacha 0.2.2",
"rand_core 0.5.1",
"rand_hc",
]
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha 0.3.1",
"rand_core 0.6.4",
]
[[package]]
name = "rand_chacha"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
dependencies = [
"ppv-lite86",
"rand_core 0.5.1",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core 0.6.4",
]
[[package]]
name = "rand_core"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
"getrandom 0.1.16",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom 0.2.14",
]
[[package]]
name = "rand_hc"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
dependencies = [
"rand_core 0.5.1",
]
[[package]]
name = "rand_xoshiro"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
dependencies = [
"rand_core 0.6.4",
]
[[package]]
name = "rayon"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
dependencies = [
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
dependencies = [
"crossbeam-deque",
"crossbeam-utils",
]
[[package]]
name = "rcgen"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffbe84efe2f38dea12e9bfc1f65377fdf03e53a18cb3b995faedf7934c7e785b"
dependencies = [
"pem",
"ring 0.16.20",
"time",
"yasna",
]
[[package]]
name = "redox_syscall"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "regex"
version = "1.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56"
[[package]]
name = "relative-path"
version = "1.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e898588f33fdd5b9420719948f9f2a32c922a246964576f71ba7f24f80610fbc"
[[package]]
name = "reqwest"
version = "0.11.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62"
dependencies = [
"async-compression",
"base64 0.21.7",
"bytes",
"encoding_rs",
"futures-core",
"futures-util",
"h2",
"http",
"http-body",
"hyper",
"hyper-rustls",
"hyper-tls",
"ipnet",
"js-sys",
"log",
"mime",
"native-tls",
"once_cell",
"percent-encoding",
"pin-project-lite",
"rustls",
"rustls-pemfile",
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper",
"system-configuration",
"tokio",
"tokio-native-tls",
"tokio-rustls",
"tokio-util",
"tower-service",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"webpki-roots 0.25.4",
"winreg",
]
[[package]]
name = "ring"
version = "0.16.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
dependencies = [
"cc",
"libc",
"once_cell",
"spin 0.5.2",
"untrusted 0.7.1",
"web-sys",
"winapi",
]
[[package]]
name = "ring"
version = "0.17.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d"
dependencies = [
"cc",
"cfg-if",
"getrandom 0.2.14",
"libc",
"spin 0.9.8",
"untrusted 0.9.0",
"windows-sys 0.52.0",
]
[[package]]
name = "ron"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94"
dependencies = [
"base64 0.21.7",
"bitflags 2.5.0",
"serde",
"serde_derive",
]
[[package]]
name = "rpassword"
version = "7.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f"
dependencies = [
"libc",
"rtoolbox",
"windows-sys 0.48.0",
]
[[package]]
name = "rstest"
version = "0.18.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97eeab2f3c0a199bc4be135c36c924b6590b88c377d416494288c14f2db30199"
dependencies = [
"futures",
"futures-timer",
"rstest_macros",
"rustc_version",
]
[[package]]
name = "rstest_macros"
version = "0.18.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d428f8247852f894ee1be110b375111b586d4fa431f6c46e64ba5a0dcccbe605"
dependencies = [
"cfg-if",
"glob",
"proc-macro2",
"quote",
"regex",
"relative-path",
"rustc_version",
"syn 2.0.58",
"unicode-ident",
]
[[package]]
name = "rtoolbox"
version = "0.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c247d24e63230cdb56463ae328478bd5eac8b8faa8c69461a77e8e323afac90e"
dependencies = [
"libc",
"windows-sys 0.48.0",
]
[[package]]
name = "rust-ini"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e2a3bcec1f113553ef1c88aae6c020a369d03d55b58de9869a0908930385091"
dependencies = [
"cfg-if",
"ordered-multimap",
]
[[package]]
name = "rustc-demangle"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver",
]
[[package]]
name = "rusticata-macros"
version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632"
dependencies = [
"nom",
]
[[package]]
name = "rustix"
version = "0.38.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89"
dependencies = [
"bitflags 2.5.0",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.52.0",
]
[[package]]
name = "rustls"
version = "0.21.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
dependencies = [
"log",
"ring 0.17.8",
"rustls-webpki",
"sct",
]
[[package]]
name = "rustls-native-certs"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00"
dependencies = [
"openssl-probe",
"rustls-pemfile",
"schannel",
"security-framework",
]
[[package]]
name = "rustls-pemfile"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
dependencies = [
"base64 0.21.7",
]
[[package]]
name = "rustls-webpki"
version = "0.101.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
dependencies = [
"ring 0.17.8",
"untrusted 0.9.0",
]
[[package]]
name = "rustversion"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80af6f9131f277a45a3fba6ce8e2258037bb0477a67e610d3c1fe046ab31de47"
[[package]]
name = "ryu"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]]
name = "schannel"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534"
dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "scroll"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04c565b551bafbef4157586fa379538366e4385d42082f255bfd96e4fe8519da"
dependencies = [
"scroll_derive",
]
[[package]]
name = "scroll_derive"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1db149f81d46d2deba7cd3c50772474707729550221e69588478ebf9ada425ae"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "sct"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
dependencies = [
"ring 0.17.8",
"untrusted 0.9.0",
]
[[package]]
name = "security-framework"
version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "770452e37cad93e0a50d5abc3990d2bc351c36d0328f86cefec2f2fb206eaef6"
dependencies = [
"bitflags 1.3.2",
"core-foundation",
"core-foundation-sys",
"libc",
"security-framework-sys",
]
[[package]]
name = "security-framework-sys"
version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41f3cc463c0ef97e11c3461a9d3787412d30e8e7eb907c79180c4a57bf7c04ef"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]]
name = "semver"
version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca"
[[package]]
name = "serde"
version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_bytes"
version = "0.11.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b8497c313fd43ab992087548117643f6fcd935cbf36f176ffda0aacf9591734"
dependencies = [
"serde",
]
[[package]]
name = "serde_derive"
version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "serde_json"
version = "1.0.115"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "serde_spanned"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1"
dependencies = [
"serde",
]
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
dependencies = [
"form_urlencoded",
"itoa",
"ryu",
"serde",
]
[[package]]
name = "serde_with"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07ff71d2c147a7b57362cead5e22f772cd52f6ab31cfcd9edcd7f6aeb2a0afbe"
dependencies = [
"serde",
"serde_with_macros",
]
[[package]]
name = "serde_with_macros"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f"
dependencies = [
"darling",
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "serial_test"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d"
dependencies = [
"dashmap",
"futures",
"lazy_static",
"log",
"parking_lot",
"serial_test_derive",
]
[[package]]
name = "serial_test_derive"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "sha1"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
dependencies = [
"cfg-if",
"cpufeatures",
"digest 0.10.7",
]
[[package]]
name = "sha2"
version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800"
dependencies = [
"block-buffer 0.9.0",
"cfg-if",
"cpufeatures",
"digest 0.9.0",
"opaque-debug",
]
[[package]]
name = "sha2"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
"cfg-if",
"cpufeatures",
"digest 0.10.7",
]
[[package]]
name = "sha3"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809"
dependencies = [
"block-buffer 0.9.0",
"digest 0.9.0",
"keccak",
"opaque-debug",
]
[[package]]
name = "sha3"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60"
dependencies = [
"digest 0.10.7",
"keccak",
]
[[package]]
name = "shell-words"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
[[package]]
name = "signal-hook-registry"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1"
dependencies = [
"libc",
]
[[package]]
name = "signature"
version = "1.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c"
[[package]]
name = "siphasher"
version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
[[package]]
name = "sized-chunks"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e"
dependencies = [
"bitmaps",
"typenum",
]
[[package]]
name = "slab"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
dependencies = [
"autocfg",
]
[[package]]
name = "smallvec"
version = "1.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
[[package]]
name = "socket2"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871"
dependencies = [
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "solana-account-decoder"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "142161f13c328e7807fe98fb8f6eaaa5045a8eaf4492414aa81254870c4fc8a0"
dependencies = [
"Inflector",
"base64 0.21.7",
"bincode",
"bs58 0.4.0",
"bv",
"lazy_static",
"serde",
"serde_derive",
"serde_json",
"solana-config-program",
"solana-sdk",
"spl-token",
"spl-token-2022",
"spl-token-group-interface",
"spl-token-metadata-interface",
"thiserror",
"zstd",
]
[[package]]
name = "solana-clap-utils"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8e9f61034a61db538a41700b6df0b4b9f0392038adaf780150481923ff94356"
dependencies = [
"chrono",
"clap 2.34.0",
"rpassword",
"solana-remote-wallet",
"solana-sdk",
"thiserror",
"tiny-bip39",
"uriparse",
"url",
]
[[package]]
name = "solana-client"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13f2bd5a986d7cac1b4ffb4344413b70b6f21fd7ffa92a985911756b4ac7682a"
dependencies = [
"async-trait",
"bincode",
"dashmap",
"futures",
"futures-util",
"indexmap 2.2.6",
"indicatif",
"log",
"quinn",
"rayon",
"solana-connection-cache",
"solana-measure",
"solana-metrics",
"solana-pubsub-client",
"solana-quic-client",
"solana-rpc-client",
"solana-rpc-client-api",
"solana-rpc-client-nonce-utils",
"solana-sdk",
"solana-streamer",
"solana-thin-client",
"solana-tpu-client",
"solana-udp-client",
"thiserror",
"tokio",
]
[[package]]
name = "solana-config-program"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "970d28779e92a11e32a89ee453edc7d89394d3a68d8c4b75ef0ffb833944c588"
dependencies = [
"bincode",
"chrono",
"serde",
"serde_derive",
"solana-program-runtime",
"solana-sdk",
]
[[package]]
name = "solana-connection-cache"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd7d0022ded19dca32ced5528c6a050596877fc8b9a89322d876960a89466e1b"
dependencies = [
"async-trait",
"bincode",
"crossbeam-channel",
"futures-util",
"indexmap 2.2.6",
"log",
"rand 0.8.5",
"rayon",
"rcgen",
"solana-measure",
"solana-metrics",
"solana-sdk",
"thiserror",
"tokio",
]
[[package]]
name = "solana-frozen-abi"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35a0b24cc4d0ebd5fd45d6bd47bed3790f8a75ade67af8ff24a3d719a8bc93bc"
dependencies = [
"block-buffer 0.10.4",
"bs58 0.4.0",
"bv",
"either",
"generic-array",
"im",
"lazy_static",
"log",
"memmap2",
"rustc_version",
"serde",
"serde_bytes",
"serde_derive",
"sha2 0.10.8",
"solana-frozen-abi-macro",
"subtle",
"thiserror",
]
[[package]]
name = "solana-frozen-abi-macro"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51600f4066d3663ab2981fd24e77a8c2e65f5d20ea71b550b853ca9ae40eee7f"
dependencies = [
"proc-macro2",
"quote",
"rustc_version",
"syn 2.0.58",
]
[[package]]
name = "solana-logger"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd79ef26804612173c95be8da84df3128d648173cf1f746de8f183ec8dbedd92"
dependencies = [
"env_logger",
"lazy_static",
"log",
]
[[package]]
name = "solana-measure"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "300f716a5f1c2f4b562fb008a0cc7d7c0d889cff802a7f8177fdf28772ae1ed9"
dependencies = [
"log",
"solana-sdk",
]
[[package]]
name = "solana-metrics"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abf1705d52e4f123856725e1b3842cd4928b954ff62391a95af142a5adc58ac6"
dependencies = [
"crossbeam-channel",
"gethostname",
"lazy_static",
"log",
"reqwest",
"solana-sdk",
"thiserror",
]
[[package]]
name = "solana-net-utils"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1f2634fd50743e2ca075e663e07b0bd5c2f94db0ac320ce5bc2022e0002d82d"
dependencies = [
"bincode",
"clap 3.2.25",
"crossbeam-channel",
"log",
"nix",
"rand 0.8.5",
"serde",
"serde_derive",
"socket2",
"solana-logger",
"solana-sdk",
"solana-version",
"tokio",
"url",
]
[[package]]
name = "solana-perf"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad0264d7093d44c239d9eb41beb6877b7b1eea5ad8809c93c1d9ab0c840ba390"
dependencies = [
"ahash 0.8.11",
"bincode",
"bv",
"caps",
"curve25519-dalek",
"dlopen2",
"fnv",
"lazy_static",
"libc",
"log",
"nix",
"rand 0.8.5",
"rayon",
"rustc_version",
"serde",
"solana-frozen-abi",
"solana-frozen-abi-macro",
"solana-metrics",
"solana-rayon-threadlimit",
"solana-sdk",
"solana-vote-program",
]
[[package]]
name = "solana-program"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a5513a02d622ba89e76baf4b49d25ae20c2c2c623fced12b0d6dd7b8f23e006"
dependencies = [
"ark-bn254",
"ark-ec",
"ark-ff",
"ark-serialize",
"base64 0.21.7",
"bincode",
"bitflags 2.5.0",
"blake3",
"borsh 0.10.3",
"borsh 0.9.3",
"borsh 1.4.0",
"bs58 0.4.0",
"bv",
"bytemuck",
"cc",
"console_error_panic_hook",
"console_log",
"curve25519-dalek",
"getrandom 0.2.14",
"itertools",
"js-sys",
"lazy_static",
"libc",
"libsecp256k1",
"light-poseidon",
"log",
"memoffset 0.9.1",
"num-bigint 0.4.4",
"num-derive 0.4.2",
"num-traits",
"parking_lot",
"rand 0.8.5",
"rustc_version",
"rustversion",
"serde",
"serde_bytes",
"serde_derive",
"serde_json",
"sha2 0.10.8",
"sha3 0.10.8",
"solana-frozen-abi",
"solana-frozen-abi-macro",
"solana-sdk-macro",
"thiserror",
"tiny-bip39",
"wasm-bindgen",
"zeroize",
]
[[package]]
name = "solana-program-runtime"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64dc9f666a8e4f93166ce58eea9dfbf275e5cad461b2f1bbfa06538718dc3212"
dependencies = [
"base64 0.21.7",
"bincode",
"eager",
"enum-iterator",
"itertools",
"libc",
"log",
"num-derive 0.4.2",
"num-traits",
"percentage",
"rand 0.8.5",
"rustc_version",
"serde",
"solana-frozen-abi",
"solana-frozen-abi-macro",
"solana-measure",
"solana-metrics",
"solana-sdk",
"solana_rbpf",
"thiserror",
]
[[package]]
name = "solana-pubsub-client"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ffdcbdad685b87475a91909fdb442d2edfabc2870110580c7f0cf7eb7883f97"
dependencies = [
"crossbeam-channel",
"futures-util",
"log",
"reqwest",
"semver",
"serde",
"serde_derive",
"serde_json",
"solana-account-decoder",
"solana-rpc-client-api",
"solana-sdk",
"thiserror",
"tokio",
"tokio-stream",
"tokio-tungstenite",
"tungstenite",
"url",
]
[[package]]
name = "solana-quic-client"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "056e909037b05097d2ff0181cb7e3d26876d8dff6d50701463a61e990cf84afd"
dependencies = [
"async-mutex",
"async-trait",
"futures",
"itertools",
"lazy_static",
"log",
"quinn",
"quinn-proto",
"rcgen",
"rustls",
"solana-connection-cache",
"solana-measure",
"solana-metrics",
"solana-net-utils",
"solana-rpc-client-api",
"solana-sdk",
"solana-streamer",
"thiserror",
"tokio",
]
[[package]]
name = "solana-rayon-threadlimit"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e93a5e1ef891dca2cca907f7196b6a5d3b80af4183f2be0f981906b16711ff5d"
dependencies = [
"lazy_static",
"num_cpus",
]
[[package]]
name = "solana-remote-wallet"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52c06eaf47d9a98ba22e890e68868f5d48c91e01268c541a53b5960288b617d6"
dependencies = [
"console",
"dialoguer",
"log",
"num-derive 0.4.2",
"num-traits",
"parking_lot",
"qstring",
"semver",
"solana-sdk",
"thiserror",
"uriparse",
]
[[package]]
name = "solana-rpc-client"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed1d4b6f1f4e3dab7509401e85edc1c1ac208c61819de90178e01cf162c9c051"
dependencies = [
"async-trait",
"base64 0.21.7",
"bincode",
"bs58 0.4.0",
"indicatif",
"log",
"reqwest",
"semver",
"serde",
"serde_derive",
"serde_json",
"solana-account-decoder",
"solana-rpc-client-api",
"solana-sdk",
"solana-transaction-status",
"solana-version",
"solana-vote-program",
"tokio",
]
[[package]]
name = "solana-rpc-client-api"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a31feddef24d3e0aab189571adea7f109639ef6179fcd3cd34ffc8c73d3409f1"
dependencies = [
"base64 0.21.7",
"bs58 0.4.0",
"jsonrpc-core",
"reqwest",
"semver",
"serde",
"serde_derive",
"serde_json",
"solana-account-decoder",
"solana-sdk",
"solana-transaction-status",
"solana-version",
"spl-token-2022",
"thiserror",
]
[[package]]
name = "solana-rpc-client-nonce-utils"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1837728262063723c659e4b8c0acf0baa99cd38cb333511456465d2c9e654474"
dependencies = [
"clap 2.34.0",
"solana-clap-utils",
"solana-rpc-client",
"solana-sdk",
"thiserror",
]
[[package]]
name = "solana-sdk"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f50cac89269a01235f6b421bc580132191f4df388f4265513e78fd00cf864dd"
dependencies = [
"assert_matches",
"base64 0.21.7",
"bincode",
"bitflags 2.5.0",
"borsh 1.4.0",
"bs58 0.4.0",
"bytemuck",
"byteorder",
"chrono",
"derivation-path",
"digest 0.10.7",
"ed25519-dalek",
"ed25519-dalek-bip32",
"generic-array",
"hmac 0.12.1",
"itertools",
"js-sys",
"lazy_static",
"libsecp256k1",
"log",
"memmap2",
"num-derive 0.4.2",
"num-traits",
"num_enum 0.7.2",
"pbkdf2 0.11.0",
"qstring",
"qualifier_attr",
"rand 0.7.3",
"rand 0.8.5",
"rustc_version",
"rustversion",
"serde",
"serde_bytes",
"serde_derive",
"serde_json",
"serde_with",
"sha2 0.10.8",
"sha3 0.10.8",
"siphasher",
"solana-frozen-abi",
"solana-frozen-abi-macro",
"solana-logger",
"solana-program",
"solana-sdk-macro",
"thiserror",
"uriparse",
"wasm-bindgen",
]
[[package]]
name = "solana-sdk-macro"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cb099b2f9c0a65a6f23ced791325141cd68c27b04d11c04fef838a00f613861"
dependencies = [
"bs58 0.4.0",
"proc-macro2",
"quote",
"rustversion",
"syn 2.0.58",
]
[[package]]
name = "solana-security-txt"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "468aa43b7edb1f9b7b7b686d5c3aeb6630dc1708e86e31343499dd5c4d775183"
[[package]]
name = "solana-streamer"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8a20843e8370adb3c04f47caa79ffdc92ae1bf078ad26530be1bca5d7bdd5d2"
dependencies = [
"async-channel",
"bytes",
"crossbeam-channel",
"futures-util",
"histogram",
"indexmap 2.2.6",
"itertools",
"libc",
"log",
"nix",
"pem",
"percentage",
"pkcs8",
"quinn",
"quinn-proto",
"rand 0.8.5",
"rcgen",
"rustls",
"smallvec",
"solana-metrics",
"solana-perf",
"solana-sdk",
"thiserror",
"tokio",
"x509-parser",
]
[[package]]
name = "solana-thin-client"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c74da8f36b89b28c47e5ba3bad5279ff3dfea5829154882845d4821fc76ff497"
dependencies = [
"bincode",
"log",
"rayon",
"solana-connection-cache",
"solana-rpc-client",
"solana-rpc-client-api",
"solana-sdk",
]
[[package]]
name = "solana-tpu-client"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0f2fd4b4aeffa14b9c5be9913072ea8e72ca261254a65a999f3d2fd70e7a660"
dependencies = [
"async-trait",
"bincode",
"futures-util",
"indexmap 2.2.6",
"indicatif",
"log",
"rayon",
"solana-connection-cache",
"solana-measure",
"solana-metrics",
"solana-pubsub-client",
"solana-rpc-client",
"solana-rpc-client-api",
"solana-sdk",
"thiserror",
"tokio",
]
[[package]]
name = "solana-transaction-status"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3efa0d30f78dbc74e795638b053dd6ec7230739301e7f0e06b586f7731fd25c8"
dependencies = [
"Inflector",
"base64 0.21.7",
"bincode",
"borsh 0.10.3",
"bs58 0.4.0",
"lazy_static",
"log",
"serde",
"serde_derive",
"serde_json",
"solana-account-decoder",
"solana-sdk",
"spl-associated-token-account",
"spl-memo",
"spl-token",
"spl-token-2022",
"thiserror",
]
[[package]]
name = "solana-udp-client"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32af58cadd37be19d04e0f3877104b8640bccc4be8ca1dbf431549b399b784c2"
dependencies = [
"async-trait",
"solana-connection-cache",
"solana-net-utils",
"solana-sdk",
"solana-streamer",
"thiserror",
"tokio",
]
[[package]]
name = "solana-version"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42c7cef8aa9f1c633bf09dd91b8e635b6b30c40236652031b1800b245dc1bd02"
dependencies = [
"log",
"rustc_version",
"semver",
"serde",
"serde_derive",
"solana-frozen-abi",
"solana-frozen-abi-macro",
"solana-sdk",
]
[[package]]
name = "solana-vote-program"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "725a39044d455c08fe83fca758e94e5ddfaa25f6e2e2cfd5c31d7afdcad8de38"
dependencies = [
"bincode",
"log",
"num-derive 0.4.2",
"num-traits",
"rustc_version",
"serde",
"serde_derive",
"solana-frozen-abi",
"solana-frozen-abi-macro",
"solana-metrics",
"solana-program",
"solana-program-runtime",
"solana-sdk",
"thiserror",
]
[[package]]
name = "solana-zk-token-sdk"
version = "1.18.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "630dc0b5f6250cf6a4c8b2bd3895283738915e83eba5453db20bb02b2527f302"
dependencies = [
"aes-gcm-siv",
"base64 0.21.7",
"bincode",
"bytemuck",
"byteorder",
"curve25519-dalek",
"getrandom 0.1.16",
"itertools",
"lazy_static",
"merlin",
"num-derive 0.4.2",
"num-traits",
"rand 0.7.3",
"serde",
"serde_json",
"sha3 0.9.1",
"solana-program",
"solana-sdk",
"subtle",
"thiserror",
"zeroize",
]
[[package]]
name = "solana_rbpf"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d457cc2ba742c120492a64b7fa60e22c575e891f6b55039f4d736568fb112a3"
dependencies = [
"byteorder",
"combine",
"goblin",
"hash32",
"libc",
"log",
"rand 0.8.5",
"rustc-demangle",
"scroll",
"thiserror",
"winapi",
]
[[package]]
name = "spin"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
name = "spin"
version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
[[package]]
name = "spki"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d01ac02a6ccf3e07db148d2be087da624fea0221a16152ed01f0496a6b0a27"
dependencies = [
"base64ct",
"der",
]
[[package]]
name = "spl-associated-token-account"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "992d9c64c2564cc8f63a4b508bf3ebcdf2254b0429b13cd1d31adb6162432a5f"
dependencies = [
"assert_matches",
"borsh 0.10.3",
"num-derive 0.4.2",
"num-traits",
"solana-program",
"spl-token",
"spl-token-2022",
"thiserror",
]
[[package]]
name = "spl-discriminator"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cce5d563b58ef1bb2cdbbfe0dfb9ffdc24903b10ae6a4df2d8f425ece375033f"
dependencies = [
"bytemuck",
"solana-program",
"spl-discriminator-derive",
]
[[package]]
name = "spl-discriminator-derive"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07fd7858fc4ff8fb0e34090e41d7eb06a823e1057945c26d480bfc21d2338a93"
dependencies = [
"quote",
"spl-discriminator-syn",
"syn 2.0.58",
]
[[package]]
name = "spl-discriminator-syn"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18fea7be851bd98d10721782ea958097c03a0c2a07d8d4997041d0ece6319a63"
dependencies = [
"proc-macro2",
"quote",
"sha2 0.10.8",
"syn 2.0.58",
"thiserror",
]
[[package]]
name = "spl-memo"
version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0f180b03318c3dbab3ef4e1e4d46d5211ae3c780940dd0a28695aba4b59a75a"
dependencies = [
"solana-program",
]
[[package]]
name = "spl-pod"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2881dddfca792737c0706fa0175345ab282b1b0879c7d877bad129645737c079"
dependencies = [
"borsh 0.10.3",
"bytemuck",
"solana-program",
"solana-zk-token-sdk",
"spl-program-error",
]
[[package]]
name = "spl-program-error"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "249e0318493b6bcf27ae9902600566c689b7dfba9f1bdff5893e92253374e78c"
dependencies = [
"num-derive 0.4.2",
"num-traits",
"solana-program",
"spl-program-error-derive",
"thiserror",
]
[[package]]
name = "spl-program-error-derive"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1845dfe71fd68f70382232742e758557afe973ae19e6c06807b2c30f5d5cb474"
dependencies = [
"proc-macro2",
"quote",
"sha2 0.10.8",
"syn 2.0.58",
]
[[package]]
name = "spl-tlv-account-resolution"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "615d381f48ddd2bb3c57c7f7fb207591a2a05054639b18a62e785117dd7a8683"
dependencies = [
"bytemuck",
"solana-program",
"spl-discriminator",
"spl-pod",
"spl-program-error",
"spl-type-length-value",
]
[[package]]
name = "spl-token"
version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08459ba1b8f7c1020b4582c4edf0f5c7511a5e099a7a97570c9698d4f2337060"
dependencies = [
"arrayref",
"bytemuck",
"num-derive 0.3.3",
"num-traits",
"num_enum 0.6.1",
"solana-program",
"thiserror",
]
[[package]]
name = "spl-token-2022"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d697fac19fd74ff472dfcc13f0b442dd71403178ce1de7b5d16f83a33561c059"
dependencies = [
"arrayref",
"bytemuck",
"num-derive 0.4.2",
"num-traits",
"num_enum 0.7.2",
"solana-program",
"solana-security-txt",
"solana-zk-token-sdk",
"spl-memo",
"spl-pod",
"spl-token",
"spl-token-group-interface",
"spl-token-metadata-interface",
"spl-transfer-hook-interface",
"spl-type-length-value",
"thiserror",
]
[[package]]
name = "spl-token-group-interface"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b889509d49fa74a4a033ca5dae6c2307e9e918122d97e58562f5c4ffa795c75d"
dependencies = [
"bytemuck",
"solana-program",
"spl-discriminator",
"spl-pod",
"spl-program-error",
]
[[package]]
name = "spl-token-metadata-interface"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c16ce3ba6979645fb7627aa1e435576172dd63088dc7848cb09aa331fa1fe4f"
dependencies = [
"borsh 0.10.3",
"solana-program",
"spl-discriminator",
"spl-pod",
"spl-program-error",
"spl-type-length-value",
]
[[package]]
name = "spl-transfer-hook-interface"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7aabdb7c471566f6ddcee724beb8618449ea24b399e58d464d6b5bc7db550259"
dependencies = [
"arrayref",
"bytemuck",
"solana-program",
"spl-discriminator",
"spl-pod",
"spl-program-error",
"spl-tlv-account-resolution",
"spl-type-length-value",
]
[[package]]
name = "spl-type-length-value"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a468e6f6371f9c69aae760186ea9f1a01c2908351b06a5e0026d21cfc4d7ecac"
dependencies = [
"bytemuck",
"solana-program",
"spl-discriminator",
"spl-pod",
"spl-program-error",
]
[[package]]
name = "strsim"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
[[package]]
name = "strsim"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "subtle"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44cfb93f38070beee36b3fef7d4f5a16f27751d94b187b666a5cc5e9b0d30687"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn_derive"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b"
dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "sync_wrapper"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
[[package]]
name = "synstructure"
version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"unicode-xid",
]
[[package]]
name = "system-configuration"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7"
dependencies = [
"bitflags 1.3.2",
"core-foundation",
"system-configuration-sys",
]
[[package]]
name = "system-configuration-sys"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]]
name = "tempfile"
version = "3.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1"
dependencies = [
"cfg-if",
"fastrand",
"rustix",
"windows-sys 0.52.0",
]
[[package]]
name = "termcolor"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
dependencies = [
"winapi-util",
]
[[package]]
name = "textwrap"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
dependencies = [
"unicode-width",
]
[[package]]
name = "textwrap"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9"
[[package]]
name = "thiserror"
version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "time"
version = "0.3.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749"
dependencies = [
"deranged",
"itoa",
"num-conv",
"powerfmt",
"serde",
"time-core",
"time-macros",
]
[[package]]
name = "time-core"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
[[package]]
name = "time-macros"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774"
dependencies = [
"num-conv",
"time-core",
]
[[package]]
name = "tiny-bip39"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffc59cb9dfc85bb312c3a78fd6aa8a8582e310b0fa885d5bb877f6dcc601839d"
dependencies = [
"anyhow",
"hmac 0.8.1",
"once_cell",
"pbkdf2 0.4.0",
"rand 0.7.3",
"rustc-hash",
"sha2 0.9.9",
"thiserror",
"unicode-normalization",
"wasm-bindgen",
"zeroize",
]
[[package]]
name = "tiny-keccak"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237"
dependencies = [
"crunchy",
]
[[package]]
name = "tinyvec"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
dependencies = [
"tinyvec_macros",
]
[[package]]
name = "tinyvec_macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tokio"
version = "1.37.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787"
dependencies = [
"backtrace",
"bytes",
"libc",
"mio",
"num_cpus",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
"socket2",
"tokio-macros",
"windows-sys 0.48.0",
]
[[package]]
name = "tokio-macros"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "tokio-native-tls"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
dependencies = [
"native-tls",
"tokio",
]
[[package]]
name = "tokio-rustls"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
dependencies = [
"rustls",
"tokio",
]
[[package]]
name = "tokio-stream"
version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af"
dependencies = [
"futures-core",
"pin-project-lite",
"tokio",
]
[[package]]
name = "tokio-tungstenite"
version = "0.20.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c"
dependencies = [
"futures-util",
"log",
"rustls",
"tokio",
"tokio-rustls",
"tungstenite",
"webpki-roots 0.25.4",
]
[[package]]
name = "tokio-util"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15"
dependencies = [
"bytes",
"futures-core",
"futures-sink",
"pin-project-lite",
"tokio",
"tracing",
]
[[package]]
name = "toml"
version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"
dependencies = [
"serde",
]
[[package]]
name = "toml"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9dd1545e8208b4a5af1aa9bbd0b4cf7e9ea08fabc5d0a5c67fcaafa17433aa3"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit 0.22.12",
]
[[package]]
name = "toml_datetime"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
version = "0.19.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
dependencies = [
"indexmap 2.2.6",
"toml_datetime",
"winnow 0.5.40",
]
[[package]]
name = "toml_edit"
version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1"
dependencies = [
"indexmap 2.2.6",
"toml_datetime",
"winnow 0.5.40",
]
[[package]]
name = "toml_edit"
version = "0.22.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3328d4f68a705b2a4498da1d580585d39a6510f98318a2cec3018a7ec61ddef"
dependencies = [
"indexmap 2.2.6",
"serde",
"serde_spanned",
"toml_datetime",
"winnow 0.6.7",
]
[[package]]
name = "tower-service"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
[[package]]
name = "tracing"
version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
dependencies = [
"log",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "tracing-core"
version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
dependencies = [
"once_cell",
]
[[package]]
name = "try-lock"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "tungstenite"
version = "0.20.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9"
dependencies = [
"byteorder",
"bytes",
"data-encoding",
"http",
"httparse",
"log",
"rand 0.8.5",
"rustls",
"sha1",
"thiserror",
"url",
"utf-8",
"webpki-roots 0.24.0",
]
[[package]]
name = "typenum"
version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "ucd-trie"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9"
[[package]]
name = "unicode-bidi"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75"
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-normalization"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5"
dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-segmentation"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"
[[package]]
name = "unicode-width"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"
[[package]]
name = "unicode-xid"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "universal-hash"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f214e8f697e925001e66ec2c6e37a4ef93f0f78c2eed7814394e10c62025b05"
dependencies = [
"generic-array",
"subtle",
]
[[package]]
name = "unreachable"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
dependencies = [
"void",
]
[[package]]
name = "untrusted"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]]
name = "untrusted"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
[[package]]
name = "uriparse"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0200d0fc04d809396c2ad43f3c95da3582a2556eba8d453c1087f4120ee352ff"
dependencies = [
"fnv",
"lazy_static",
]
[[package]]
name = "url"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
dependencies = [
"form_urlencoded",
"idna",
"percent-encoding",
]
[[package]]
name = "utf-8"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "utf8parse"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "vec_map"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "void"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
[[package]]
name = "want"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
dependencies = [
"try-lock",
]
[[package]]
name = "wasi"
version = "0.9.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-bindgen"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
dependencies = [
"bumpalo",
"log",
"once_cell",
"proc-macro2",
"quote",
"syn 2.0.58",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0"
dependencies = [
"cfg-if",
"js-sys",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
]
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
[[package]]
name = "web-sys"
version = "0.3.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef"
dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "webpki-roots"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888"
dependencies = [
"rustls-webpki",
]
[[package]]
name = "webpki-roots"
version = "0.25.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
dependencies = [
"winapi",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-core"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
dependencies = [
"windows-targets 0.52.4",
]
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets 0.48.5",
]
[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.4",
]
[[package]]
name = "windows-targets"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [
"windows_aarch64_gnullvm 0.48.5",
"windows_aarch64_msvc 0.48.5",
"windows_i686_gnu 0.48.5",
"windows_i686_msvc 0.48.5",
"windows_x86_64_gnu 0.48.5",
"windows_x86_64_gnullvm 0.48.5",
"windows_x86_64_msvc 0.48.5",
]
[[package]]
name = "windows-targets"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b"
dependencies = [
"windows_aarch64_gnullvm 0.52.4",
"windows_aarch64_msvc 0.52.4",
"windows_i686_gnu 0.52.4",
"windows_i686_msvc 0.52.4",
"windows_x86_64_gnu 0.52.4",
"windows_x86_64_gnullvm 0.52.4",
"windows_x86_64_msvc 0.52.4",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675"
[[package]]
name = "windows_i686_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnu"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3"
[[package]]
name = "windows_i686_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_i686_msvc"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
[[package]]
name = "winnow"
version = "0.5.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876"
dependencies = [
"memchr",
]
[[package]]
name = "winnow"
version = "0.6.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14b9415ee827af173ebb3f15f9083df5a122eb93572ec28741fb153356ea2578"
dependencies = [
"memchr",
]
[[package]]
name = "winreg"
version = "0.50.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1"
dependencies = [
"cfg-if",
"windows-sys 0.48.0",
]
[[package]]
name = "x509-parser"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0ecbeb7b67ce215e40e3cc7f2ff902f94a223acf44995934763467e7b1febc8"
dependencies = [
"asn1-rs",
"base64 0.13.1",
"data-encoding",
"der-parser",
"lazy_static",
"nom",
"oid-registry",
"rusticata-macros",
"thiserror",
"time",
]
[[package]]
name = "yaml-rust"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
"linked-hash-map",
]
[[package]]
name = "yasna"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd"
dependencies = [
"time",
]
[[package]]
name = "zerocopy"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "zeroize"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd"
dependencies = [
"zeroize_derive",
]
[[package]]
name = "zeroize_derive"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.58",
]
[[package]]
name = "zstd"
version = "0.11.2+zstd.1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4"
dependencies = [
"zstd-safe",
]
[[package]]
name = "zstd-safe"
version = "5.0.2+zstd.1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db"
dependencies = [
"libc",
"zstd-sys",
]
[[package]]
name = "zstd-sys"
version = "2.0.10+zstd.1.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa"
dependencies = [
"cc",
"pkg-config",
]
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/forester/.dockerignore
|
**/*.log
.git/
**/test-ledger/
**/logs/
target
**/target/
node_modules
**/node_modules
Dockerfile
.dockerignore
.git
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/forester/package.json
|
{
"name": "@lightprotocol/forester",
"version": "0.3.0",
"license": "GPL-3.0",
"scripts": {
"build": "cargo build",
"test": "RUSTFLAGS=\"--cfg tokio_unstable -D warnings\" cargo test --package forester -- --test-threads=1 --nocapture",
"docker:build": "docker build --tag forester -f Dockerfile .."
},
"devDependencies": {
"@lightprotocol/zk-compression-cli": "workspace:*"
},
"nx": {
"targets": {
"build": {
"outputs": [
"{workspaceRoot}/target/release"
]
}
}
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/forester/.env.example
|
export FORESTER_RPC_URL="http://localhost:8899"
export FORESTER_WS_RPC_URL="ws://localhost:8900"
export FORESTER_INDEXER_URL="http://localhost:8784"
export FORESTER_PROVER_URL="http://localhost:3001"
export FORESTER_PUSH_GATEWAY_URL="http://localhost:9092/metrics/job/forester"
export FORESTER_PAYER=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64]
export PHOTON_API_KEY="00000000-0000-0000-0000-000000000000"
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/tests/e2e_test.rs
|
use account_compression::utils::constants::{ADDRESS_QUEUE_VALUES, STATE_NULLIFIER_QUEUE_VALUES};
use account_compression::AddressMerkleTreeAccount;
use forester::queue_helpers::fetch_queue_item_data;
use forester::run_pipeline;
use forester::utils::get_protocol_config;
use forester_utils::indexer::{AddressMerkleTreeAccounts, StateMerkleTreeAccounts};
use forester_utils::registry::register_test_forester;
use light_client::rpc::solana_rpc::SolanaRpcUrl;
use light_client::rpc::{RpcConnection, RpcError, SolanaRpcConnection};
use light_client::rpc_pool::SolanaRpcPool;
use light_program_test::test_env::EnvAccounts;
use light_prover_client::gnark::helpers::{LightValidatorConfig, ProverConfig, ProverMode};
use light_registry::utils::{get_epoch_pda_address, get_forester_epoch_pda_from_authority};
use light_registry::{EpochPda, ForesterEpochPda};
use light_test_utils::e2e_test_env::E2ETestEnv;
use light_test_utils::indexer::TestIndexer;
use light_test_utils::update_test_forester;
use solana_sdk::commitment_config::CommitmentConfig;
use solana_sdk::native_token::LAMPORTS_PER_SOL;
use solana_sdk::pubkey::Pubkey;
use solana_sdk::signature::Keypair;
use solana_sdk::signer::Signer;
use std::collections::HashSet;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::{mpsc, oneshot, Mutex};
use tokio::time::{sleep, timeout};
mod test_utils;
use test_utils::*;
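// End-to-end test: spins up a local validator with the forester test prover,
// registers and funds a single forester, creates work in the state and address
// queues, then runs the forester pipeline and asserts that every queued item was
// processed and recorded in the epoch PDAs.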
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
#[ignore]
async fn test_epoch_monitor_with_test_indexer_and_1_forester() {
init(Some(LightValidatorConfig {
enable_indexer: false,
wait_time: 10,
prover_config: Some(ProverConfig {
run_mode: Some(ProverMode::ForesterTest),
circuits: vec![],
}),
}))
.await;
let forester_keypair = Keypair::new();
let mut env_accounts = EnvAccounts::get_local_test_validator_accounts();
env_accounts.forester = forester_keypair.insecure_clone();
let mut config = forester_config();
config.payer_keypair = forester_keypair.insecure_clone();
let pool = SolanaRpcPool::<SolanaRpcConnection>::new(
config.external_services.rpc_url.to_string(),
CommitmentConfig::confirmed(),
config.general_config.rpc_pool_size as u32,
)
.await
.unwrap();
let mut rpc = SolanaRpcConnection::new(SolanaRpcUrl::Localnet, None);
rpc.payer = forester_keypair.insecure_clone();
rpc.airdrop_lamports(&forester_keypair.pubkey(), LAMPORTS_PER_SOL * 100_000)
.await
.unwrap();
rpc.airdrop_lamports(
&env_accounts.governance_authority.pubkey(),
LAMPORTS_PER_SOL * 100_000,
)
.await
.unwrap();
register_test_forester(
&mut rpc,
&env_accounts.governance_authority,
&forester_keypair.pubkey(),
light_registry::ForesterConfig::default(),
)
.await
.unwrap();
let new_forester_keypair = Keypair::new();
rpc.airdrop_lamports(&new_forester_keypair.pubkey(), LAMPORTS_PER_SOL * 100_000)
.await
.unwrap();
update_test_forester(
&mut rpc,
&forester_keypair,
&forester_keypair.pubkey(),
Some(&new_forester_keypair),
light_registry::ForesterConfig::default(),
)
.await
.unwrap();
config.derivation_pubkey = forester_keypair.pubkey();
config.payer_keypair = new_forester_keypair.insecure_clone();
let config = Arc::new(config);
let indexer: TestIndexer<SolanaRpcConnection> =
TestIndexer::init_from_env(&config.payer_keypair, &env_accounts, None).await;
let mut env = E2ETestEnv::<SolanaRpcConnection, TestIndexer<SolanaRpcConnection>>::new(
rpc,
indexer,
&env_accounts,
keypair_action_config(),
general_action_config(),
0,
Some(0),
)
.await;
let user_index = 0;
let balance = env
.rpc
.get_balance(&env.users[user_index].keypair.pubkey())
.await
.unwrap();
env.compress_sol(user_index, balance).await;
let state_trees: Vec<StateMerkleTreeAccounts> = env
.indexer
.state_merkle_trees
.iter()
.map(|x| x.accounts)
.collect();
let address_trees: Vec<AddressMerkleTreeAccounts> = env
.indexer
.address_merkle_trees
.iter()
.map(|x| x.accounts)
.collect();
let iterations = 1;
let mut total_expected_work = 0;
// Create work and assert that the queues are not empty
{
for _ in 0..iterations {
env.transfer_sol(user_index).await;
env.create_address(None, None).await;
}
// Asserting non-empty because transfer sol is not deterministic.
assert_queue_len(
&pool,
&state_trees,
&address_trees,
&mut total_expected_work,
iterations,
true,
)
.await;
}
let (shutdown_sender, shutdown_receiver) = oneshot::channel();
let (work_report_sender, mut work_report_receiver) = mpsc::channel(100);
// Run the forester pipeline
let service_handle = tokio::spawn(run_pipeline(
config.clone(),
Arc::new(Mutex::new(env.indexer)),
shutdown_receiver,
work_report_sender,
));
if work_report_receiver.recv().await.is_some() {
println!("work_reported");
};
let mut rpc = pool.get_connection().await.unwrap();
let epoch_pda_address = get_epoch_pda_address(0);
let epoch_pda = (*rpc)
.get_anchor_account::<EpochPda>(&epoch_pda_address)
.await
.unwrap()
.unwrap();
let total_processed = epoch_pda.total_work;
let forester_epoch_pda_address =
get_forester_epoch_pda_from_authority(&config.derivation_pubkey, 0).0;
let forester_epoch_pda = (*rpc)
.get_anchor_account::<ForesterEpochPda>(&forester_epoch_pda_address)
.await
.unwrap()
.unwrap();
assert_eq!(forester_epoch_pda.work_counter, total_processed);
// assert that all (2) queues have been emptied
{
assert_queue_len(
&pool,
&state_trees.clone(),
&address_trees.clone(),
&mut 0,
0,
false,
)
.await;
assert_eq!(
total_processed, total_expected_work,
"Not all items were processed."
);
}
shutdown_sender
.send(())
.expect("Failed to send shutdown signal");
service_handle.await.unwrap().unwrap();
}
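// Fetches the nullifier/address queue contents for every tree and either asserts
// they are non-empty (right after work was created) or that they match the
// expected length (after the forester should have emptied them). The observed
// lengths are accumulated into `total_expected_work`.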
pub async fn assert_queue_len(
pool: &SolanaRpcPool<SolanaRpcConnection>,
state_trees: &[StateMerkleTreeAccounts],
address_trees: &[AddressMerkleTreeAccounts],
total_expected_work: &mut u64,
expected_len: usize,
not_empty: bool,
) {
for tree in state_trees.iter() {
let mut rpc = pool.get_connection().await.unwrap();
let queue_length = fetch_queue_item_data(
&mut *rpc,
&tree.nullifier_queue,
0,
STATE_NULLIFIER_QUEUE_VALUES,
STATE_NULLIFIER_QUEUE_VALUES,
)
.await
.unwrap()
.len();
if not_empty {
assert_ne!(queue_length, 0);
} else {
assert_eq!(queue_length, expected_len);
}
*total_expected_work += queue_length as u64;
}
for tree in address_trees.iter() {
let mut rpc = pool.get_connection().await.unwrap();
let queue_length = fetch_queue_item_data(
&mut *rpc,
&tree.queue,
0,
ADDRESS_QUEUE_VALUES,
ADDRESS_QUEUE_VALUES,
)
.await
.unwrap()
.len();
if not_empty {
assert_ne!(queue_length, 0);
} else {
assert_eq!(queue_length, expected_len);
}
*total_expected_work += queue_length as u64;
}
}
// TODO: add test which asserts epoch registration over many epochs (we need a different protocol config for that)
// TODO: add test with photon indexer for an infinite local test which performs work over many epochs
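// Two foresters compete over the same epochs: both are registered, state and
// address trees with rollover threshold 0 are created so rollover work exists,
// and the test asserts that epochs 0 and 1 are processed, the threshold-0 trees
// are rolled over, and all queues end up empty.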
#[tokio::test(flavor = "multi_thread", worker_threads = 32)]
async fn test_epoch_monitor_with_2_foresters() {
init(Some(LightValidatorConfig {
enable_indexer: false,
wait_time: 15,
prover_config: Some(ProverConfig {
run_mode: Some(ProverMode::ForesterTest),
circuits: vec![],
}),
}))
.await;
let forester_keypair1 = Keypair::new();
let forester_keypair2 = Keypair::new();
let mut env_accounts = EnvAccounts::get_local_test_validator_accounts();
env_accounts.forester = forester_keypair1.insecure_clone();
let mut config1 = forester_config();
config1.payer_keypair = forester_keypair1.insecure_clone();
let mut config2 = forester_config();
config2.payer_keypair = forester_keypair2.insecure_clone();
let pool = SolanaRpcPool::<SolanaRpcConnection>::new(
config1.external_services.rpc_url.to_string(),
CommitmentConfig::confirmed(),
config1.general_config.rpc_pool_size as u32,
)
.await
.unwrap();
let mut rpc = SolanaRpcConnection::new(SolanaRpcUrl::Localnet, None);
rpc.payer = forester_keypair1.insecure_clone();
// Airdrop to both foresters and governance authority
for keypair in [
&forester_keypair1,
&forester_keypair2,
&env_accounts.governance_authority,
] {
rpc.airdrop_lamports(&keypair.pubkey(), LAMPORTS_PER_SOL * 100_000)
.await
.unwrap();
}
// Register both foresters
for forester_keypair in [&forester_keypair1, &forester_keypair2] {
register_test_forester(
&mut rpc,
&env_accounts.governance_authority,
&forester_keypair.pubkey(),
light_registry::ForesterConfig::default(),
)
.await
.unwrap();
}
let new_forester_keypair1 = Keypair::new();
let new_forester_keypair2 = Keypair::new();
for forester_keypair in [&new_forester_keypair1, &new_forester_keypair2] {
rpc.airdrop_lamports(&forester_keypair.pubkey(), LAMPORTS_PER_SOL * 100_000)
.await
.unwrap();
}
update_test_forester(
&mut rpc,
&forester_keypair1,
&forester_keypair1.pubkey(),
Some(&new_forester_keypair1),
light_registry::ForesterConfig::default(),
)
.await
.unwrap();
update_test_forester(
&mut rpc,
&forester_keypair2,
&forester_keypair2.pubkey(),
Some(&new_forester_keypair2),
light_registry::ForesterConfig::default(),
)
.await
.unwrap();
config1.derivation_pubkey = forester_keypair1.pubkey();
config1.payer_keypair = new_forester_keypair1.insecure_clone();
config2.derivation_pubkey = forester_keypair2.pubkey();
config2.payer_keypair = new_forester_keypair2.insecure_clone();
let config1 = Arc::new(config1);
let config2 = Arc::new(config2);
let indexer: TestIndexer<SolanaRpcConnection> =
TestIndexer::init_from_env(&config1.payer_keypair, &env_accounts, None).await;
let mut env = E2ETestEnv::<SolanaRpcConnection, TestIndexer<SolanaRpcConnection>>::new(
rpc,
indexer,
&env_accounts,
keypair_action_config(),
general_action_config(),
0,
Some(0),
)
.await;
let user_index = 0;
let balance = env
.rpc
.get_balance(&env.users[user_index].keypair.pubkey())
.await
.unwrap();
env.compress_sol(user_index, balance).await;
// Create state and address trees which can be rolled over
env.create_address_tree(Some(0)).await;
env.create_state_tree(Some(0)).await;
let state_tree_with_rollover_threshold_0 =
env.indexer.state_merkle_trees[1].accounts.merkle_tree;
let address_tree_with_rollover_threshold_0 =
env.indexer.address_merkle_trees[1].accounts.merkle_tree;
let state_trees: Vec<StateMerkleTreeAccounts> = env
.indexer
.state_merkle_trees
.iter()
.map(|x| x.accounts)
.collect();
let address_trees: Vec<AddressMerkleTreeAccounts> = env
.indexer
.address_merkle_trees
.iter()
.map(|x| x.accounts)
.collect();
println!("Address trees: {:?}", address_trees);
// Two rollovers plus other work
let mut total_expected_work = 2;
{
let iterations = 5;
for i in 0..iterations {
println!("Round {} of {}", i, iterations);
let user_keypair = env.users[0].keypair.insecure_clone();
env.transfer_sol_deterministic(&user_keypair, &user_keypair.pubkey(), Some(1))
.await
.unwrap();
env.transfer_sol_deterministic(&user_keypair, &user_keypair.pubkey(), Some(0))
.await
.unwrap();
sleep(Duration::from_millis(100)).await;
env.create_address(None, Some(1)).await;
env.create_address(None, Some(0)).await;
}
assert_queue_len(
&pool,
&state_trees,
&address_trees,
&mut total_expected_work,
0,
true,
)
.await;
}
let (shutdown_sender1, shutdown_receiver1) = oneshot::channel();
let (shutdown_sender2, shutdown_receiver2) = oneshot::channel();
let (work_report_sender1, mut work_report_receiver1) = mpsc::channel(100);
let (work_report_sender2, mut work_report_receiver2) = mpsc::channel(100);
let indexer = Arc::new(Mutex::new(env.indexer));
let service_handle1 = tokio::spawn(run_pipeline(
config1.clone(),
indexer.clone(),
shutdown_receiver1,
work_report_sender1,
));
let service_handle2 = tokio::spawn(run_pipeline(
config2.clone(),
indexer,
shutdown_receiver2,
work_report_sender2,
));
// Wait until work reports covering epochs 0 and 1 have arrived from the foresters
const TIMEOUT_DURATION: Duration = Duration::from_secs(360);
const EXPECTED_EPOCHS: u64 = 2; // We expect to process 2 epochs (0 and 1)
let result: Result<(), tokio::time::error::Elapsed> = timeout(TIMEOUT_DURATION, async {
let mut processed_epochs = HashSet::new();
let mut total_processed = 0;
while processed_epochs.len() < EXPECTED_EPOCHS as usize {
tokio::select! {
Some(report) = work_report_receiver1.recv() => {
println!("Received work report from forester 1: {:?}", report);
total_processed += report.processed_items;
processed_epochs.insert(report.epoch);
}
Some(report) = work_report_receiver2.recv() => {
println!("Received work report from forester 2: {:?}", report);
total_processed += report.processed_items;
processed_epochs.insert(report.epoch);
}
else => break,
}
}
println!("Processed {} items", total_processed);
// Verify that we've processed the expected number of epochs
assert_eq!(
processed_epochs.len(),
EXPECTED_EPOCHS as usize,
"Processed {} epochs, expected {}",
processed_epochs.len(),
EXPECTED_EPOCHS
);
// Verify that we've processed epochs 0 and 1
assert!(processed_epochs.contains(&0), "Epoch 0 was not processed");
assert!(processed_epochs.contains(&1), "Epoch 1 was not processed");
})
.await;
// Handle timeout
if result.is_err() {
panic!("Test timed out after {:?}", TIMEOUT_DURATION);
}
assert_trees_are_rolledover(
&pool,
&state_tree_with_rollover_threshold_0,
&address_tree_with_rollover_threshold_0,
)
.await;
// assert queues have been emptied
assert_queue_len(&pool, &state_trees, &address_trees, &mut 0, 0, false).await;
let mut rpc = pool.get_connection().await.unwrap();
let forester_pubkeys = [config1.derivation_pubkey, config2.derivation_pubkey];
// Assert that the foresters are registered for every processed epoch (0 and 1) and
// for the next epoch (2). No new work is created after epoch 0, so only epoch 0
// should carry the expected workload.
for epoch in 0..=EXPECTED_EPOCHS {
let total_processed_work =
assert_foresters_registered(&forester_pubkeys[..], &mut rpc, epoch)
.await
.unwrap();
if epoch == 0 {
assert_eq!(
total_processed_work, total_expected_work,
"Not all items were processed."
);
} else {
assert_eq!(
total_processed_work, 0,
"Not all items were processed in prior epoch."
);
}
}
shutdown_sender1
.send(())
.expect("Failed to send shutdown signal to forester 1");
shutdown_sender2
.send(())
.expect("Failed to send shutdown signal to forester 2");
service_handle1.await.unwrap().unwrap();
service_handle2.await.unwrap().unwrap();
}
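// A tree counts as rolled over once its rollover_metadata.rolledover_slot has been
// moved away from the u64::MAX sentinel.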
pub async fn assert_trees_are_rolledover(
pool: &SolanaRpcPool<SolanaRpcConnection>,
state_tree_with_rollover_threshold_0: &Pubkey,
address_tree_with_rollover_threshold_0: &Pubkey,
) {
let mut rpc = pool.get_connection().await.unwrap();
let address_merkle_tree = rpc
.get_anchor_account::<AddressMerkleTreeAccount>(address_tree_with_rollover_threshold_0)
.await
.unwrap()
.unwrap();
assert_ne!(
address_merkle_tree
.metadata
.rollover_metadata
.rolledover_slot,
u64::MAX,
"address_merkle_tree: {:?}",
address_merkle_tree
);
let state_merkle_tree = rpc
.get_anchor_account::<AddressMerkleTreeAccount>(state_tree_with_rollover_threshold_0)
.await
.unwrap()
.unwrap();
assert_ne!(
state_merkle_tree.metadata.rollover_metadata.rolledover_slot,
u64::MAX,
"state_merkle_tree: {:?}",
state_merkle_tree
);
}
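// Sums the work counters of all forester epoch PDAs for the given epoch and errors
// if any of the foresters is not registered for it.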
async fn assert_foresters_registered(
foresters: &[Pubkey],
rpc: &mut SolanaRpcConnection,
epoch: u64,
) -> Result<u64, RpcError> {
let mut performed_work = 0;
for (i, forester) in foresters.iter().enumerate() {
let forester_epoch_pda = get_forester_epoch_pda_from_authority(forester, epoch).0;
let forester_epoch_pda = rpc
.get_anchor_account::<ForesterEpochPda>(&forester_epoch_pda)
.await?;
println!("forester_epoch_pda {}: {:?}", i, forester_epoch_pda);
if let Some(forester_epoch_pda) = forester_epoch_pda {
// If one forester is first for both queues there will be no work left
// - this assert is flaky
// assert!(
// forester_epoch_pda.work_counter > 0,
// "forester {} did not perform any work",
// i
// );
performed_work += forester_epoch_pda.work_counter;
} else {
return Err(RpcError::CustomError(format!(
"Forester {} not registered",
i,
)));
}
}
Ok(performed_work)
}
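// Starts and shuts down the forester pipeline repeatedly within the same epoch to
// verify that re-registration is idempotent and the forester stays registered for
// the current epoch.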
#[tokio::test(flavor = "multi_thread", worker_threads = 32)]
async fn test_epoch_double_registration() {
println!("*****************************************************************");
init(Some(LightValidatorConfig {
enable_indexer: false,
wait_time: 10,
prover_config: Some(ProverConfig {
run_mode: Some(ProverMode::ForesterTest),
circuits: vec![],
}),
}))
.await;
let forester_keypair = Keypair::new();
let mut env_accounts = EnvAccounts::get_local_test_validator_accounts();
env_accounts.forester = forester_keypair.insecure_clone();
let mut config = forester_config();
config.payer_keypair = forester_keypair.insecure_clone();
let pool = SolanaRpcPool::<SolanaRpcConnection>::new(
config.external_services.rpc_url.to_string(),
CommitmentConfig::confirmed(),
config.general_config.rpc_pool_size as u32,
)
.await
.unwrap();
let mut rpc = SolanaRpcConnection::new(SolanaRpcUrl::Localnet, None);
rpc.payer = forester_keypair.insecure_clone();
rpc.airdrop_lamports(&forester_keypair.pubkey(), LAMPORTS_PER_SOL * 100_000)
.await
.unwrap();
rpc.airdrop_lamports(
&env_accounts.governance_authority.pubkey(),
LAMPORTS_PER_SOL * 100_000,
)
.await
.unwrap();
register_test_forester(
&mut rpc,
&env_accounts.governance_authority,
&forester_keypair.pubkey(),
light_registry::ForesterConfig::default(),
)
.await
.unwrap();
let new_forester_keypair = Keypair::new();
rpc.airdrop_lamports(&new_forester_keypair.pubkey(), LAMPORTS_PER_SOL * 100_000)
.await
.unwrap();
update_test_forester(
&mut rpc,
&forester_keypair,
&forester_keypair.pubkey(),
Some(&new_forester_keypair),
light_registry::ForesterConfig::default(),
)
.await
.unwrap();
config.derivation_pubkey = forester_keypair.pubkey();
config.payer_keypair = new_forester_keypair.insecure_clone();
let config = Arc::new(config);
let indexer: TestIndexer<SolanaRpcConnection> =
TestIndexer::init_from_env(&config.payer_keypair, &env_accounts, None).await;
let indexer = Arc::new(Mutex::new(indexer));
for _ in 0..10 {
let (shutdown_sender, shutdown_receiver) = oneshot::channel();
let (work_report_sender, _work_report_receiver) = mpsc::channel(100);
// Run the forester pipeline
let service_handle = tokio::spawn(run_pipeline(
config.clone(),
indexer.clone(),
shutdown_receiver,
work_report_sender.clone(),
));
sleep(Duration::from_secs(2)).await;
shutdown_sender
.send(())
.expect("Failed to send shutdown signal");
let result = service_handle.await.unwrap();
assert!(result.is_ok(), "Registration should succeed");
}
let mut rpc = pool.get_connection().await.unwrap();
let protocol_config = get_protocol_config(&mut *rpc).await;
let solana_slot = rpc.get_slot().await.unwrap();
let current_epoch = protocol_config.get_current_epoch(solana_slot);
let forester_epoch_pda_address =
get_forester_epoch_pda_from_authority(&config.derivation_pubkey, current_epoch).0;
let forester_epoch_pda = rpc
.get_anchor_account::<ForesterEpochPda>(&forester_epoch_pda_address)
.await
.unwrap();
assert!(
forester_epoch_pda.is_some(),
"Forester should be registered"
);
let forester_epoch_pda = forester_epoch_pda.unwrap();
assert_eq!(
forester_epoch_pda.epoch, current_epoch,
"Registered epoch should match current epoch"
);
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/tests/test_utils.rs
|
use account_compression::initialize_address_merkle_tree::Pubkey;
use forester::config::{ExternalServicesConfig, GeneralConfig};
use forester::metrics::register_metrics;
use forester::photon_indexer::PhotonIndexer;
use forester::telemetry::setup_telemetry;
use forester::ForesterConfig;
use forester_utils::indexer::{Indexer, IndexerError, NewAddressProofWithContext};
use light_client::rpc::{RpcConnection, SolanaRpcConnection};
use light_program_test::test_env::get_test_env_accounts;
use light_prover_client::gnark::helpers::{spawn_validator, LightValidatorConfig};
use light_test_utils::e2e_test_env::{GeneralActionConfig, KeypairActionConfig, User};
use light_test_utils::indexer::TestIndexer;
use solana_sdk::signature::{Keypair, Signer};
use tracing::debug;
#[allow(dead_code)]
pub async fn init(config: Option<LightValidatorConfig>) {
setup_telemetry();
register_metrics();
spawn_test_validator(config).await;
}
#[allow(dead_code)]
pub async fn spawn_test_validator(config: Option<LightValidatorConfig>) {
let config = config.unwrap_or_default();
spawn_validator(config).await;
}
#[allow(dead_code)]
pub fn keypair_action_config() -> KeypairActionConfig {
KeypairActionConfig {
compress_sol: Some(1.0),
decompress_sol: Some(1.0),
transfer_sol: Some(1.0),
create_address: Some(1.0),
compress_spl: Some(1.0),
decompress_spl: Some(1.0),
mint_spl: Some(1.0),
transfer_spl: Some(1.0),
max_output_accounts: Some(3),
fee_assert: false,
approve_spl: None,
revoke_spl: None,
freeze_spl: None,
thaw_spl: None,
burn_spl: None,
}
}
#[allow(dead_code)]
pub fn general_action_config() -> GeneralActionConfig {
GeneralActionConfig {
add_keypair: Some(1.0),
create_state_mt: Some(1.0),
create_address_mt: Some(1.0),
nullify_compressed_accounts: Some(1.0),
empty_address_queue: Some(1.0),
rollover: None,
add_forester: None,
disable_epochs: true,
}
}
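// Default forester configuration pointing at the local test validator with a
// freshly generated forester keypair; individual tests override the keypairs and
// derivation pubkey as needed.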
#[allow(dead_code)]
pub fn forester_config() -> ForesterConfig {
let mut env_accounts = get_test_env_accounts();
env_accounts.forester = Keypair::new();
ForesterConfig {
external_services: ExternalServicesConfig {
rpc_url: "http://localhost:8899".to_string(),
ws_rpc_url: Some("ws://localhost:8900".to_string()),
indexer_url: Some("http://localhost:8784".to_string()),
prover_url: Some("http://localhost:3001".to_string()),
photon_api_key: None,
pushgateway_url: None,
pagerduty_routing_key: None,
},
retry_config: Default::default(),
queue_config: Default::default(),
indexer_config: Default::default(),
transaction_config: Default::default(),
general_config: GeneralConfig {
rpc_pool_size: 20,
slot_update_interval_seconds: 10,
tree_discovery_interval_seconds: 5,
enable_metrics: false,
},
registry_pubkey: light_registry::ID,
payer_keypair: env_accounts.forester.insecure_clone(),
derivation_pubkey: env_accounts.forester.pubkey(),
address_tree_data: vec![],
state_tree_data: vec![],
}
}
// Generate a mock pubkey whose value fits in fewer than 254 bits by zeroing the first byte
#[allow(dead_code)]
pub fn generate_pubkey_254() -> Pubkey {
let mock_address: Pubkey = Pubkey::new_unique();
let mut mock_address_less_than_254_bit: [u8; 32] = mock_address.to_bytes();
mock_address_less_than_254_bit[0] = 0;
Pubkey::from(mock_address_less_than_254_bit)
}
#[allow(dead_code)]
pub async fn assert_new_address_proofs_for_photon_and_test_indexer<R: RpcConnection>(
indexer: &mut TestIndexer<SolanaRpcConnection>,
trees: &[Pubkey],
addresses: &[Pubkey],
photon_indexer: &PhotonIndexer<R>,
) {
for (tree, address) in trees.iter().zip(addresses.iter()) {
let address_proof_test_indexer = indexer
.get_multiple_new_address_proofs(tree.to_bytes(), vec![address.to_bytes()])
.await;
let address_proof_photon = photon_indexer
.get_multiple_new_address_proofs(tree.to_bytes(), vec![address.to_bytes()])
.await;
if address_proof_photon.is_err() {
panic!("Photon error: {:?}", address_proof_photon);
}
if address_proof_test_indexer.is_err() {
panic!("Test indexer error: {:?}", address_proof_test_indexer);
}
let photon_result: NewAddressProofWithContext =
address_proof_photon.unwrap().first().unwrap().clone();
let test_indexer_result: NewAddressProofWithContext =
address_proof_test_indexer.unwrap().first().unwrap().clone();
debug!(
"assert proofs for address: {} photon result: {:?} test indexer result: {:?}",
address, photon_result, test_indexer_result
);
assert_eq!(photon_result.merkle_tree, test_indexer_result.merkle_tree);
assert_eq!(
photon_result.low_address_index,
test_indexer_result.low_address_index
);
assert_eq!(
photon_result.low_address_value,
test_indexer_result.low_address_value
);
assert_eq!(
photon_result.low_address_next_index,
test_indexer_result.low_address_next_index
);
assert_eq!(
photon_result.low_address_next_value,
test_indexer_result.low_address_next_value
);
assert_eq!(
photon_result.low_address_proof.len(),
test_indexer_result.low_address_proof.len()
);
assert_eq!(photon_result.root, test_indexer_result.root);
assert_eq!(photon_result.root_seq, test_indexer_result.root_seq);
for (photon_proof_hash, test_indexer_proof_hash) in photon_result
.low_address_proof
.iter()
.zip(test_indexer_result.low_address_proof.iter())
{
assert_eq!(photon_proof_hash, test_indexer_proof_hash);
}
}
}
#[allow(dead_code)]
pub async fn assert_accounts_by_owner<R: RpcConnection>(
indexer: &mut TestIndexer<R>,
user: &User,
photon_indexer: &PhotonIndexer<R>,
) {
let mut photon_accs = photon_indexer
.get_rpc_compressed_accounts_by_owner(&user.keypair.pubkey())
.await
.unwrap();
photon_accs.sort();
let mut test_accs = indexer
.get_rpc_compressed_accounts_by_owner(&user.keypair.pubkey())
.await
.unwrap();
test_accs.sort();
debug!(
"asserting accounts for user: {} Test accs: {:?} Photon accs: {:?}",
user.keypair.pubkey().to_string(),
test_accs.len(),
photon_accs.len()
);
assert_eq!(test_accs.len(), photon_accs.len());
debug!("test_accs: {:?}", test_accs);
debug!("photon_accs: {:?}", photon_accs);
for (test_acc, indexer_acc) in test_accs.iter().zip(photon_accs.iter()) {
assert_eq!(test_acc, indexer_acc);
}
}
#[allow(dead_code)]
pub async fn assert_account_proofs_for_photon_and_test_indexer<R: RpcConnection>(
indexer: &mut TestIndexer<R>,
user_pubkey: &Pubkey,
photon_indexer: &PhotonIndexer<R>,
) {
let accs: Result<Vec<String>, IndexerError> = indexer
.get_rpc_compressed_accounts_by_owner(user_pubkey)
.await;
for account_hash in accs.unwrap() {
let photon_result = photon_indexer
.get_multiple_compressed_account_proofs(vec![account_hash.clone()])
.await;
let test_indexer_result = indexer
.get_multiple_compressed_account_proofs(vec![account_hash.clone()])
.await;
if photon_result.is_err() {
panic!("Photon error: {:?}", photon_result);
}
if test_indexer_result.is_err() {
panic!("Test indexer error: {:?}", test_indexer_result);
}
let photon_result = photon_result.unwrap();
let test_indexer_result = test_indexer_result.unwrap();
debug!(
"assert proofs for account: {} photon result: {:?} test indexer result: {:?}",
account_hash, photon_result, test_indexer_result
);
assert_eq!(photon_result.len(), test_indexer_result.len());
for (photon_proof, test_indexer_proof) in
photon_result.iter().zip(test_indexer_result.iter())
{
assert_eq!(photon_proof.hash, test_indexer_proof.hash);
assert_eq!(photon_proof.leaf_index, test_indexer_proof.leaf_index);
assert_eq!(photon_proof.merkle_tree, test_indexer_proof.merkle_tree);
assert_eq!(photon_proof.root_seq, test_indexer_proof.root_seq);
assert_eq!(photon_proof.proof.len(), test_indexer_proof.proof.len());
for (photon_proof_hash, test_indexer_proof_hash) in photon_proof
.proof
.iter()
.zip(test_indexer_proof.proof.iter())
{
assert_eq!(photon_proof_hash, test_indexer_proof_hash);
}
}
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/telemetry.rs
|
use env_logger::Env;
use std::sync::Once;
use tracing_appender::rolling::{RollingFileAppender, Rotation};
use tracing_subscriber::{fmt, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer};
static INIT: Once = Once::new();
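// Initializes tracing exactly once: a stdout layer filtered via RUST_LOG (default
// "info") plus an hourly-rolling file layer writing to logs/forester.log at
// "info,forester=debug".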
pub fn setup_telemetry() {
INIT.call_once(|| {
let file_appender = RollingFileAppender::new(Rotation::HOURLY, "logs", "forester.log");
let (non_blocking, _guard) = tracing_appender::non_blocking(file_appender);
let env_filter = EnvFilter::try_from_default_env()
.unwrap_or_else(|_| EnvFilter::new("info,forester=debug"));
let file_env_filter = EnvFilter::new("info,forester=debug");
let stdout_env_filter =
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));
let stdout_layer = fmt::Layer::new()
.with_writer(std::io::stdout)
.with_ansi(true)
.with_filter(stdout_env_filter);
let file_layer = fmt::Layer::new()
.with_writer(non_blocking)
.with_filter(file_env_filter);
tracing_subscriber::registry()
.with(env_filter)
.with(stdout_layer)
.with(file_layer)
.init();
// Leak the guard so the non-blocking writer is never dropped and keeps flushing logs for the lifetime of the process
std::mem::forget(_guard);
});
}
pub fn setup_logger() {
let env = Env::new().filter_or("RUST_LOG", "info,forester=debug");
env_logger::Builder::from_env(env).init();
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/photon_indexer.rs
|
use crate::utils::decode_hash;
use account_compression::initialize_address_merkle_tree::Pubkey;
use forester_utils::indexer::{Indexer, IndexerError, MerkleProof, NewAddressProofWithContext};
use light_client::rpc::RpcConnection;
use photon_api::apis::configuration::{ApiKey, Configuration};
use photon_api::models::{AddressWithTree, GetCompressedAccountsByOwnerPostRequestParams};
use solana_sdk::bs58;
use std::fmt::Debug;
use tracing::debug;
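// Thin wrapper around the Photon indexer HTTP API. Proofs returned by Photon
// include the canopy levels, which are stripped before being handed to callers.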
pub struct PhotonIndexer<R: RpcConnection> {
configuration: Configuration,
#[allow(dead_code)]
rpc: R,
}
impl<R: RpcConnection> PhotonIndexer<R> {
pub fn new(path: String, api_key: Option<String>, rpc: R) -> Self {
let configuration = Configuration {
base_path: path,
api_key: api_key.map(|key| ApiKey {
prefix: Some("api-key".to_string()),
key,
}),
..Default::default()
};
PhotonIndexer { configuration, rpc }
}
}
impl<R: RpcConnection> Debug for PhotonIndexer<R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("PhotonIndexer")
.field("configuration", &self.configuration)
.finish()
}
}
impl<R: RpcConnection> Indexer<R> for PhotonIndexer<R> {
async fn get_multiple_compressed_account_proofs(
&self,
hashes: Vec<String>,
) -> Result<Vec<MerkleProof>, IndexerError> {
debug!("Getting proofs for {:?}", hashes);
let request: photon_api::models::GetMultipleCompressedAccountProofsPostRequest =
photon_api::models::GetMultipleCompressedAccountProofsPostRequest {
params: hashes,
..Default::default()
};
let result = photon_api::apis::default_api::get_multiple_compressed_account_proofs_post(
&self.configuration,
request,
)
.await;
match result {
Ok(response) => {
match response.result {
Some(result) => {
let proofs = result
.value
.iter()
.map(|x| {
let mut proof_result_value = x.proof.clone();
proof_result_value.truncate(proof_result_value.len() - 10); // Remove canopy
let proof: Vec<[u8; 32]> =
proof_result_value.iter().map(|x| decode_hash(x)).collect();
MerkleProof {
hash: x.hash.clone(),
leaf_index: x.leaf_index,
merkle_tree: x.merkle_tree.clone(),
proof,
root_seq: x.root_seq,
}
})
.collect();
Ok(proofs)
}
None => {
let error = response.error.unwrap();
Err(IndexerError::Custom(error.message.unwrap()))
}
}
}
Err(e) => Err(IndexerError::Custom(e.to_string())),
}
}
async fn get_rpc_compressed_accounts_by_owner(
&self,
owner: &Pubkey,
) -> Result<Vec<String>, IndexerError> {
let request = photon_api::models::GetCompressedAccountsByOwnerPostRequest {
params: Box::from(GetCompressedAccountsByOwnerPostRequestParams {
cursor: None,
data_slice: None,
filters: None,
limit: None,
owner: owner.to_string(),
}),
..Default::default()
};
let result = photon_api::apis::default_api::get_compressed_accounts_by_owner_post(
&self.configuration,
request,
)
.await
.unwrap();
let accs = result.result.unwrap().value;
let mut hashes = Vec::new();
for acc in accs.items {
hashes.push(acc.hash);
}
Ok(hashes)
}
async fn get_multiple_new_address_proofs(
&self,
merkle_tree_pubkey: [u8; 32],
addresses: Vec<[u8; 32]>,
) -> Result<Vec<NewAddressProofWithContext>, IndexerError> {
let params: Vec<AddressWithTree> = addresses
.iter()
.map(|x| AddressWithTree {
address: bs58::encode(x).into_string(),
tree: bs58::encode(&merkle_tree_pubkey).into_string(),
})
.collect();
let request = photon_api::models::GetMultipleNewAddressProofsV2PostRequest {
params,
..Default::default()
};
debug!("Request: {:?}", request);
let result = photon_api::apis::default_api::get_multiple_new_address_proofs_v2_post(
&self.configuration,
request,
)
.await;
debug!("Response: {:?}", result);
if result.is_err() {
return Err(IndexerError::Custom(result.err().unwrap().to_string()));
}
let photon_proofs = result.unwrap().result.unwrap().value;
let mut proofs: Vec<NewAddressProofWithContext> = Vec::new();
for photon_proof in photon_proofs {
let tree_pubkey = decode_hash(&photon_proof.merkle_tree);
let low_address_value = decode_hash(&photon_proof.lower_range_address);
let next_address_value = decode_hash(&photon_proof.higher_range_address);
let proof = NewAddressProofWithContext {
merkle_tree: tree_pubkey,
low_address_index: photon_proof.low_element_leaf_index as u64,
low_address_value,
low_address_next_index: photon_proof.next_index as u64,
low_address_next_value: next_address_value,
low_address_proof: {
let mut proof_vec: Vec<[u8; 32]> = photon_proof
.proof
.iter()
.map(|x: &String| decode_hash(x))
.collect();
proof_vec.truncate(proof_vec.len() - 10); // Remove canopy
let mut proof_arr = [[0u8; 32]; 16];
proof_arr.copy_from_slice(&proof_vec);
proof_arr
},
root: decode_hash(&photon_proof.root),
root_seq: photon_proof.root_seq,
new_low_element: None,
new_element: None,
new_element_next_value: None,
};
proofs.push(proof);
}
Ok(proofs)
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/slot_tracker.rs
|
use light_client::rpc::RpcConnection;
use std::sync::atomic::{AtomicU64, Ordering};
use std::time::UNIX_EPOCH;
use std::{sync::Arc, time::SystemTime};
use tokio::time::{sleep, Duration};
use tracing::{debug, error};
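// Duration of a single Solana slot, taken from the default genesis config
// (ns_per_slot); used to extrapolate the current slot between RPC updates.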
pub fn slot_duration() -> Duration {
Duration::from_nanos(solana_sdk::genesis_config::GenesisConfig::default().ns_per_slot() as u64)
}
#[derive(Debug)]
pub struct SlotTracker {
last_known_slot: AtomicU64,
last_update_time: AtomicU64,
update_interval: Duration,
}
impl SlotTracker {
pub fn new(initial_slot: u64, update_interval: Duration) -> Self {
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_millis() as u64;
Self {
last_known_slot: AtomicU64::new(initial_slot),
last_update_time: AtomicU64::new(now),
update_interval,
}
}
pub fn update(&self, new_slot: u64) {
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_millis() as u64;
self.last_known_slot.store(new_slot, Ordering::Release);
self.last_update_time.store(now, Ordering::Release);
}
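// Extrapolates the current slot from the last slot reported by RPC plus the
// wall-clock time elapsed since that update, divided by the slot duration.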
pub fn estimated_current_slot(&self) -> u64 {
let last_slot = self.last_known_slot.load(Ordering::Acquire);
let last_update = self.last_update_time.load(Ordering::Acquire);
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_millis() as u64;
let elapsed = Duration::from_millis(now - last_update);
let estimated_slot =
last_slot + (elapsed.as_secs_f64() / slot_duration().as_secs_f64()) as u64;
debug!(
"Estimated current slot: {} (last known: {}, elapsed: {:?})",
estimated_slot, last_slot, elapsed
);
estimated_slot
}
pub async fn run<R: RpcConnection + Send + 'static>(self: Arc<Self>, rpc: &mut R) {
loop {
match rpc.get_slot().await {
Ok(slot) => {
self.update(slot);
}
Err(e) => error!("Failed to get slot: {:?}", e),
}
tokio::time::sleep(self.update_interval).await;
}
}
}
pub async fn wait_until_slot_reached<R: RpcConnection>(
rpc: &mut R,
slot_tracker: &Arc<SlotTracker>,
target_slot: u64,
) -> crate::Result<()> {
debug!("Waiting for slot {}", target_slot);
loop {
let current_estimated_slot = slot_tracker.estimated_current_slot();
if current_estimated_slot >= target_slot {
// Double-check with actual RPC call
let actual_slot = rpc.get_slot().await?;
if actual_slot >= target_slot {
break;
}
}
let sleep_duration = if current_estimated_slot < target_slot {
let slots_to_wait = target_slot - current_estimated_slot;
Duration::from_secs_f64(slots_to_wait as f64 * slot_duration().as_secs_f64())
} else {
slot_duration()
};
debug!(
"Estimated slot: {}, waiting for {} seconds",
current_estimated_slot,
sleep_duration.as_secs_f64()
);
sleep(sleep_duration).await;
}
debug!("Slot {} reached", target_slot);
Ok(())
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/forester_status.rs
|
use anchor_lang::{AccountDeserialize, Discriminator};
use forester_utils::forester_epoch::TreeType;
use light_client::rpc::{RpcConnection, SolanaRpcConnection};
use light_registry::{protocol_config::state::ProtocolConfigPda, EpochPda, ForesterEpochPda};
use solana_sdk::{account::ReadableAccount, commitment_config::CommitmentConfig};
use std::sync::Arc;
use tracing::{debug, warn};
use crate::rollover::get_tree_fullness;
use crate::{
cli::StatusArgs,
metrics::{push_metrics, register_metrics},
run_queue_info,
tree_data_sync::fetch_trees,
ForesterConfig,
};
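// Prints a status report: decodes all registry PDAs by discriminator, shows the
// active and registration epochs with their progress, the registered foresters,
// queue lengths per tree type, and the fullness of every discovered tree.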
pub async fn fetch_forester_status(args: &StatusArgs) {
let commitment_config = CommitmentConfig::confirmed();
let client = solana_client::rpc_client::RpcClient::new_with_commitment(
args.rpc_url.clone(),
commitment_config,
);
let registry_accounts = client
.get_program_accounts(&light_registry::ID)
.expect("Failed to fetch accounts for registry program.");
let mut forester_epoch_pdas = vec![];
let mut epoch_pdas = vec![];
let mut protocol_config_pdas = vec![];
for (_, account) in registry_accounts {
match account.data()[0..8].try_into().unwrap() {
ForesterEpochPda::DISCRIMINATOR => {
let forester_epoch_pda =
ForesterEpochPda::try_deserialize_unchecked(&mut account.data())
.expect("Failed to deserialize ForesterEpochPda");
forester_epoch_pdas.push(forester_epoch_pda);
}
EpochPda::DISCRIMINATOR => {
let epoch_pda = EpochPda::try_deserialize_unchecked(&mut account.data())
.expect("Failed to deserialize EpochPda");
epoch_pdas.push(epoch_pda);
}
ProtocolConfigPda::DISCRIMINATOR => {
let protocol_config_pda =
ProtocolConfigPda::try_deserialize_unchecked(&mut account.data())
.expect("Failed to deserialize ProtocolConfigPda");
protocol_config_pdas.push(protocol_config_pda);
}
_ => (),
}
}
forester_epoch_pdas.sort_by(|a, b| a.epoch.cmp(&b.epoch));
epoch_pdas.sort_by(|a, b| a.epoch.cmp(&b.epoch));
let slot = client.get_slot().expect("Failed to fetch slot.");
let current_active_epoch = protocol_config_pdas[0]
.config
.get_current_active_epoch(slot)
.unwrap();
let current_registration_epoch = protocol_config_pdas[0]
.config
.get_latest_register_epoch(slot)
.unwrap();
println!("Current active epoch: {:?}", current_active_epoch);
println!(
"Current registration epoch: {:?}",
current_registration_epoch
);
println!(
"Forester registered for latest epoch: {:?}",
forester_epoch_pdas
.iter()
.any(|pda| pda.epoch == current_registration_epoch)
);
println!(
"Forester registered for active epoch: {:?}",
forester_epoch_pdas
.iter()
.any(|pda| pda.epoch == current_active_epoch)
);
println!(
"current active epoch progress {:?} / {}",
protocol_config_pdas[0]
.config
.get_current_active_epoch_progress(slot),
protocol_config_pdas[0].config.active_phase_length
);
println!(
"current active epoch progress {:.2?}%",
protocol_config_pdas[0]
.config
.get_current_active_epoch_progress(slot) as f64
/ protocol_config_pdas[0].config.active_phase_length as f64
* 100f64
);
println!("Hours until next epoch : {:?} hours", {
// One slot lasts ~460 ms; convert the remaining slots to milliseconds, then seconds, then hours
protocol_config_pdas[0]
.config
.active_phase_length
.saturating_sub(
protocol_config_pdas[0]
.config
.get_current_active_epoch_progress(slot),
)
* 460
/ 1000
/ 3600
});
let slots_until_next_registration = protocol_config_pdas[0]
.config
.registration_phase_length
.saturating_sub(
protocol_config_pdas[0]
.config
.get_current_active_epoch_progress(slot),
);
println!(
"Slots until next registration : {:?}",
slots_until_next_registration
);
println!(
"Hours until next registration : {:?} hours",
// One slot lasts ~460 ms; convert the remaining slots to hours
slots_until_next_registration * 460 / 1000 / 3600
);
if args.full {
for epoch in &epoch_pdas {
println!("Epoch: {:?}", epoch.epoch);
let registered_foresters_in_epoch = forester_epoch_pdas
.iter()
.filter(|pda| pda.epoch == epoch.epoch);
for forester in registered_foresters_in_epoch {
println!("Forester authority: {:?}", forester.authority);
}
}
}
if args.protocol_config {
println!("protocol config: {:?}", protocol_config_pdas[0]);
}
let config = Arc::new(ForesterConfig::new_for_status(args).unwrap());
if config.general_config.enable_metrics {
register_metrics();
}
debug!("Fetching trees...");
debug!("RPC URL: {}", config.external_services.rpc_url);
let mut rpc = SolanaRpcConnection::new(config.external_services.rpc_url.clone(), None);
let trees = fetch_trees(&rpc).await.unwrap();
if trees.is_empty() {
warn!("No trees found. Exiting.");
}
run_queue_info(config.clone(), trees.clone(), TreeType::State).await;
run_queue_info(config.clone(), trees.clone(), TreeType::Address).await;
for tree in &trees {
let tree_type = format!(
"[{}]",
match tree.tree_type {
TreeType::State => "State",
TreeType::Address => "Address",
}
);
let tree_info = get_tree_fullness(&mut rpc, tree.merkle_tree, tree.tree_type)
.await
.unwrap();
let fullness_percentage = tree_info.fullness * 100.0;
println!(
"{} Tree {}: Fullness: {:.2}% | Next Index: {} | Threshold: {}",
tree_type,
&tree.merkle_tree,
fullness_percentage,
tree_info.next_index,
tree_info.threshold
);
}
push_metrics(&config.external_services.pushgateway_url)
.await
.unwrap();
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/config.rs
|
use crate::cli::{StartArgs, StatusArgs};
use crate::errors::ForesterError;
use account_compression::initialize_address_merkle_tree::Pubkey;
use account_compression::utils::constants::{ADDRESS_QUEUE_VALUES, STATE_NULLIFIER_QUEUE_VALUES};
use anchor_lang::Id;
use forester_utils::forester_epoch::{Epoch, TreeAccounts, TreeForesterSchedule};
use light_client::rpc::RetryConfig;
use light_registry::{EpochPda, ForesterEpochPda};
use solana_sdk::signature::Keypair;
use std::str::FromStr;
use std::time::Duration;
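// Aggregated runtime configuration for the forester, assembled either from the
// `start` CLI arguments (new_for_start) or with defaults for the read-only
// `status` command (new_for_status).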
#[derive(Debug)]
pub struct ForesterConfig {
pub external_services: ExternalServicesConfig,
pub retry_config: RetryConfig,
pub queue_config: QueueConfig,
pub indexer_config: IndexerConfig,
pub transaction_config: TransactionConfig,
pub general_config: GeneralConfig,
pub registry_pubkey: Pubkey,
pub payer_keypair: Keypair,
pub derivation_pubkey: Pubkey,
pub address_tree_data: Vec<TreeAccounts>,
pub state_tree_data: Vec<TreeAccounts>,
}
#[derive(Debug, Clone)]
pub struct ExternalServicesConfig {
pub rpc_url: String,
pub ws_rpc_url: Option<String>,
pub indexer_url: Option<String>,
pub prover_url: Option<String>,
pub photon_api_key: Option<String>,
pub pushgateway_url: Option<String>,
pub pagerduty_routing_key: Option<String>,
}
#[derive(Debug, Clone, Copy)]
pub struct QueueConfig {
pub state_queue_start_index: u16,
pub state_queue_length: u16,
pub address_queue_start_index: u16,
pub address_queue_length: u16,
}
#[derive(Debug, Clone)]
pub struct IndexerConfig {
pub batch_size: usize,
pub max_concurrent_batches: usize,
}
#[derive(Debug, Clone)]
pub struct TransactionConfig {
pub batch_size: usize,
pub max_concurrent_batches: usize,
pub cu_limit: u32,
}
#[derive(Debug, Clone)]
pub struct GeneralConfig {
pub rpc_pool_size: usize,
pub slot_update_interval_seconds: u64,
pub tree_discovery_interval_seconds: u64,
pub enable_metrics: bool,
}
impl Default for QueueConfig {
fn default() -> Self {
QueueConfig {
state_queue_start_index: 0,
state_queue_length: STATE_NULLIFIER_QUEUE_VALUES,
address_queue_start_index: 0,
address_queue_length: ADDRESS_QUEUE_VALUES,
}
}
}
impl Default for IndexerConfig {
fn default() -> Self {
Self {
batch_size: 50,
max_concurrent_batches: 10,
}
}
}
impl Default for TransactionConfig {
fn default() -> Self {
Self {
batch_size: 1,
max_concurrent_batches: 20,
cu_limit: 1_000_000,
}
}
}
impl ForesterConfig {
pub fn new_for_start(args: &StartArgs) -> Result<Self, ForesterError> {
let registry_pubkey = light_registry::program::LightRegistry::id().to_string();
let payer: Vec<u8> = match &args.payer {
Some(payer_str) => serde_json::from_str(payer_str)
.map_err(|e| ForesterError::ConfigError(e.to_string()))?,
None => return Err(ForesterError::ConfigError("Payer is required".to_string())),
};
let payer =
Keypair::from_bytes(&payer).map_err(|e| ForesterError::ConfigError(e.to_string()))?;
let derivation: Vec<u8> = match &args.derivation {
Some(derivation_str) => serde_json::from_str(derivation_str)
.map_err(|e| ForesterError::ConfigError(e.to_string()))?,
None => {
return Err(ForesterError::ConfigError(
"Derivation is required".to_string(),
))
}
};
let derivation_array: [u8; 32] = derivation
.try_into()
.map_err(|_| ForesterError::ConfigError("Derivation must be 32 bytes".to_string()))?;
let derivation = Pubkey::from(derivation_array);
let rpc_url = args
.rpc_url
.clone()
.ok_or_else(|| ForesterError::ConfigError("RPC URL is required".to_string()))?;
Ok(Self {
external_services: ExternalServicesConfig {
rpc_url,
ws_rpc_url: args.ws_rpc_url.clone(),
indexer_url: args.indexer_url.clone(),
prover_url: args.prover_url.clone(),
photon_api_key: args.photon_api_key.clone(),
pushgateway_url: args.push_gateway_url.clone(),
pagerduty_routing_key: args.pagerduty_routing_key.clone(),
},
retry_config: RetryConfig {
max_retries: args.max_retries,
retry_delay: Duration::from_millis(args.retry_delay),
timeout: Duration::from_millis(args.retry_timeout),
},
queue_config: QueueConfig {
state_queue_start_index: args.state_queue_start_index,
state_queue_length: args.state_queue_processing_length,
address_queue_start_index: args.address_queue_start_index,
address_queue_length: args.address_queue_processing_length,
},
indexer_config: IndexerConfig {
batch_size: args.indexer_batch_size,
max_concurrent_batches: args.indexer_max_concurrent_batches,
},
transaction_config: TransactionConfig {
batch_size: args.transaction_batch_size,
max_concurrent_batches: args.transaction_max_concurrent_batches,
cu_limit: args.cu_limit,
},
general_config: GeneralConfig {
rpc_pool_size: args.rpc_pool_size,
slot_update_interval_seconds: args.slot_update_interval_seconds,
tree_discovery_interval_seconds: args.tree_discovery_interval_seconds,
enable_metrics: args.enable_metrics(),
},
registry_pubkey: Pubkey::from_str(®istry_pubkey)
.map_err(|e| ForesterError::ConfigError(e.to_string()))?,
payer_keypair: payer,
derivation_pubkey: derivation,
address_tree_data: vec![],
state_tree_data: vec![],
})
}
pub fn new_for_status(args: &StatusArgs) -> Result<Self, ForesterError> {
let rpc_url = args.rpc_url.clone();
Ok(Self {
external_services: ExternalServicesConfig {
rpc_url,
ws_rpc_url: None,
indexer_url: None,
prover_url: None,
photon_api_key: None,
pushgateway_url: args.push_gateway_url.clone(),
pagerduty_routing_key: args.pagerduty_routing_key.clone(),
},
retry_config: RetryConfig::default(),
queue_config: QueueConfig::default(),
indexer_config: IndexerConfig::default(),
transaction_config: TransactionConfig::default(),
general_config: GeneralConfig {
rpc_pool_size: 1,
slot_update_interval_seconds: 10,
tree_discovery_interval_seconds: 5,
enable_metrics: args.enable_metrics(),
},
registry_pubkey: Pubkey::default(),
payer_keypair: Keypair::new(),
derivation_pubkey: Pubkey::default(),
address_tree_data: vec![],
state_tree_data: vec![],
})
}
}
impl Clone for ForesterConfig {
fn clone(&self) -> Self {
ForesterConfig {
external_services: self.external_services.clone(),
retry_config: self.retry_config,
queue_config: self.queue_config,
indexer_config: self.indexer_config.clone(),
transaction_config: self.transaction_config.clone(),
general_config: self.general_config.clone(),
registry_pubkey: self.registry_pubkey,
payer_keypair: self.payer_keypair.insecure_clone(),
derivation_pubkey: self.derivation_pubkey,
address_tree_data: self.address_tree_data.clone(),
state_tree_data: self.state_tree_data.clone(),
}
}
}
#[derive(Debug, Clone)]
pub struct ForesterEpochInfo {
pub epoch: Epoch,
pub epoch_pda: EpochPda,
pub forester_epoch_pda: ForesterEpochPda,
pub trees: Vec<TreeForesterSchedule>,
}
impl ForesterEpochInfo {
pub fn add_trees_with_schedule(&mut self, trees: &[TreeAccounts], current_solana_slot: u64) {
for tree in trees {
let tree_schedule = TreeForesterSchedule::new_with_schedule(
tree,
current_solana_slot,
&self.forester_epoch_pda,
&self.epoch_pda,
);
self.trees.push(tree_schedule);
}
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/lib.rs
|
pub type Result<T> = std::result::Result<T, ForesterError>;
pub mod cli;
pub mod config;
pub mod epoch_manager;
pub mod errors;
pub mod forester_status;
pub mod metrics;
pub mod pagerduty;
pub mod photon_indexer;
pub mod pubsub_client;
pub mod queue_helpers;
pub mod rollover;
pub mod send_transaction;
mod slot_tracker;
pub mod telemetry;
pub mod tree_data_sync;
pub mod tree_finder;
pub mod utils;
use crate::epoch_manager::{run_service, WorkReport};
use crate::errors::ForesterError;
use crate::metrics::QUEUE_LENGTH;
use crate::queue_helpers::fetch_queue_item_data;
use crate::slot_tracker::SlotTracker;
use crate::utils::get_protocol_config;
use account_compression::utils::constants::{ADDRESS_QUEUE_VALUES, STATE_NULLIFIER_QUEUE_VALUES};
pub use config::{ForesterConfig, ForesterEpochInfo};
use forester_utils::forester_epoch::{TreeAccounts, TreeType};
use forester_utils::indexer::Indexer;
use light_client::rpc::{RpcConnection, SolanaRpcConnection};
use light_client::rpc_pool::SolanaRpcPool;
use solana_sdk::commitment_config::CommitmentConfig;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::{mpsc, oneshot, Mutex};
use tracing::debug;
pub async fn run_queue_info(
config: Arc<ForesterConfig>,
trees: Vec<TreeAccounts>,
queue_type: TreeType,
) {
let mut rpc = SolanaRpcConnection::new(config.external_services.rpc_url.to_string(), None);
let trees: Vec<_> = trees
.iter()
.filter(|t| t.tree_type == queue_type)
.cloned()
.collect();
for tree_data in trees {
let length = if tree_data.tree_type == TreeType::State {
STATE_NULLIFIER_QUEUE_VALUES
} else {
ADDRESS_QUEUE_VALUES
};
let queue_length = fetch_queue_item_data(&mut rpc, &tree_data.queue, 0, length, length)
.await
.unwrap()
.len();
QUEUE_LENGTH
.with_label_values(&[&*queue_type.to_string(), &tree_data.merkle_tree.to_string()])
.set(queue_length as i64);
println!(
"{:?} queue {} length: {}",
queue_type, tree_data.queue, queue_length
);
}
}
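// Builds the RPC connection pool, fetches the protocol config, starts a background
// slot tracker task, and runs the epoch manager service until the shutdown signal
// is received.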
pub async fn run_pipeline<R: RpcConnection, I: Indexer<R>>(
config: Arc<ForesterConfig>,
indexer: Arc<Mutex<I>>,
shutdown: oneshot::Receiver<()>,
work_report_sender: mpsc::Sender<WorkReport>,
) -> Result<()> {
let rpc_pool = SolanaRpcPool::<R>::new(
config.external_services.rpc_url.to_string(),
CommitmentConfig::confirmed(),
config.general_config.rpc_pool_size as u32,
)
.await
.map_err(|e| ForesterError::Custom(e.to_string()))?;
let protocol_config = {
let mut rpc = rpc_pool.get_connection().await?;
get_protocol_config(&mut *rpc).await
};
let arc_pool = Arc::new(rpc_pool);
let arc_pool_clone = Arc::clone(&arc_pool);
let slot = {
let mut rpc = arc_pool.get_connection().await?;
rpc.get_slot().await?
};
let slot_tracker = SlotTracker::new(
slot,
Duration::from_secs(config.general_config.slot_update_interval_seconds),
);
let arc_slot_tracker = Arc::new(slot_tracker);
let arc_slot_tracker_clone = arc_slot_tracker.clone();
tokio::spawn(async move {
let mut rpc = arc_pool_clone
.get_connection()
.await
.expect("Failed to get RPC connection");
SlotTracker::run(arc_slot_tracker_clone, &mut *rpc).await;
});
debug!("Starting Forester pipeline");
run_service(
config,
Arc::new(protocol_config),
arc_pool,
indexer,
shutdown,
work_report_sender,
arc_slot_tracker,
)
.await?;
Ok(())
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/pubsub_client.rs
|
use crate::errors::ForesterError;
use crate::queue_helpers::QueueUpdate;
use crate::ForesterConfig;
use crate::Result;
use account_compression::initialize_address_merkle_tree::Pubkey;
use futures::StreamExt;
use solana_account_decoder::UiAccountEncoding;
use solana_client::nonblocking::pubsub_client::PubsubClient;
use solana_client::rpc_config::{RpcAccountInfoConfig, RpcProgramAccountsConfig};
use solana_sdk::commitment_config::CommitmentConfig;
use std::str::FromStr;
use std::thread;
use tokio::runtime::Builder;
use tokio::sync::mpsc;
use tracing::{debug, error};
pub async fn setup_pubsub_client(
config: &ForesterConfig,
queue_pubkeys: std::collections::HashSet<Pubkey>,
) -> Result<(mpsc::Receiver<QueueUpdate>, mpsc::Sender<()>)> {
let ws_url = match &config.external_services.ws_rpc_url {
Some(url) => url.clone(),
None => {
return Err(ForesterError::Custom(
"PubSub client requires a WebSocket URL".to_string(),
))
}
};
debug!(
"Setting up pubsub client for {} queues",
queue_pubkeys.len()
);
let (update_tx, update_rx) = mpsc::channel(100);
let (shutdown_tx, shutdown_rx) = mpsc::channel(1);
let handle = spawn_pubsub_client(ws_url, queue_pubkeys, update_tx, shutdown_rx);
tokio::spawn(async move {
match handle.join() {
Ok(result) => {
if let Err(e) = result {
error!("PubSub client error: {:?}", e);
} else {
debug!("PubSub client thread completed successfully");
}
}
Err(e) => error!("Failed to join PubSub client thread: {:?}", e),
}
});
Ok((update_rx, shutdown_tx))
}
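// Runs the WebSocket subscription on a dedicated OS thread with its own
// single-threaded Tokio runtime; account updates for the tracked queue pubkeys are
// forwarded over the update channel until a shutdown message arrives.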
fn spawn_pubsub_client(
ws_url: String,
queue_pubkeys: std::collections::HashSet<Pubkey>,
update_tx: mpsc::Sender<QueueUpdate>,
mut shutdown_rx: mpsc::Receiver<()>,
) -> thread::JoinHandle<Result<()>> {
thread::spawn(move || {
let rt = Builder::new_current_thread()
.enable_all()
.build()
.map_err(|e| ForesterError::Custom(format!("Failed to build runtime: {}", e)))?;
rt.block_on(async {
debug!("Connecting to PubSub at {}", ws_url);
let pubsub_client = PubsubClient::new(&ws_url).await.map_err(|e| {
ForesterError::Custom(format!("Failed to create PubsubClient: {}", e))
})?;
debug!("PubSub connection established");
let (mut subscription, _) = pubsub_client
.program_subscribe(
&account_compression::id(),
Some(RpcProgramAccountsConfig {
filters: None,
account_config: RpcAccountInfoConfig {
encoding: Some(UiAccountEncoding::Base64),
commitment: Some(CommitmentConfig::confirmed()),
data_slice: None,
min_context_slot: None,
},
with_context: Some(true),
}),
)
.await
.map_err(|e| {
ForesterError::Custom(format!("Failed to subscribe to program: {}", e))
})?;
loop {
tokio::select! {
Some(update) = subscription.next() => {
if let Ok(pubkey) = Pubkey::from_str(&update.value.pubkey) {
if queue_pubkeys.contains(&pubkey) {
debug!("Received update for queue {}", pubkey);
if update_tx.send(QueueUpdate {
pubkey,
slot: update.context.slot,
}).await.is_err() {
debug!("Failed to send update, receiver might have been dropped");
break;
}
}
}
}
_ = shutdown_rx.recv() => {
debug!("Received shutdown signal");
break;
}
}
}
debug!("PubSub client loop ended");
Ok(())
})
})
}
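// Illustrative usage sketch (assumption, not part of the original file): drain
// QueueUpdate events from the subscription set up above. This consumer stops
// after the first event; a real caller keeps receiving until the pipeline
// shuts down and uses the returned sender to stop the background client.
#[allow(dead_code)]
async fn example_consume_queue_updates(
    config: &ForesterConfig,
    queue_pubkeys: std::collections::HashSet<Pubkey>,
) -> Result<()> {
    let (mut update_rx, shutdown_tx) = setup_pubsub_client(config, queue_pubkeys).await?;
    if let Some(update) = update_rx.recv().await {
        debug!("queue {} changed at slot {}", update.pubkey, update.slot);
    }
    // Signal the background thread's runtime loop to exit.
    let _ = shutdown_tx.send(()).await;
    Ok(())
}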
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/metrics.rs
|
use crate::Result;
use lazy_static::lazy_static;
use prometheus::{Encoder, GaugeVec, IntCounterVec, IntGauge, IntGaugeVec, Registry, TextEncoder};
use reqwest::Client;
use std::sync::Once;
use std::time::{SystemTime, UNIX_EPOCH};
use tokio::sync::Mutex;
use tracing::{debug, error};
lazy_static! {
pub static ref REGISTRY: Registry = Registry::new();
pub static ref QUEUE_LENGTH: IntGaugeVec = IntGaugeVec::new(
prometheus::opts!("queue_length", "Length of the queue"),
&["tree_type", "tree_pubkey"]
)
.expect("metric can be created");
pub static ref LAST_RUN_TIMESTAMP: IntGauge = IntGauge::new(
"forester_last_run_timestamp",
"Timestamp of the last Forester run"
)
.expect("metric can be created");
pub static ref TRANSACTIONS_PROCESSED: IntCounterVec = IntCounterVec::new(
prometheus::opts!(
"forester_transactions_processed_total",
"Total number of transactions processed"
),
&["epoch"]
)
.expect("metric can be created");
pub static ref TRANSACTION_TIMESTAMP: GaugeVec = GaugeVec::new(
prometheus::opts!(
"forester_transaction_timestamp",
"Timestamp of the last processed transaction"
),
&["epoch"]
)
.expect("metric can be created");
pub static ref TRANSACTION_RATE: GaugeVec = GaugeVec::new(
prometheus::opts!(
"forester_transaction_rate",
"Rate of transactions processed per second"
),
&["epoch"]
)
.expect("metric can be created");
pub static ref FORESTER_SOL_BALANCE: GaugeVec = GaugeVec::new(
prometheus::opts!(
"forester_sol_balance",
"Current SOL balance of the forester"
),
&["pubkey"]
)
.expect("metric can be created");
static ref METRIC_UPDATES: Mutex<Vec<(u64, usize, std::time::Duration)>> =
Mutex::new(Vec::new());
}
static INIT: Once = Once::new();
pub fn register_metrics() {
INIT.call_once(|| {
REGISTRY
.register(Box::new(QUEUE_LENGTH.clone()))
.expect("collector can be registered");
REGISTRY
.register(Box::new(LAST_RUN_TIMESTAMP.clone()))
.expect("collector can be registered");
REGISTRY
.register(Box::new(TRANSACTIONS_PROCESSED.clone()))
.expect("collector can be registered");
REGISTRY
.register(Box::new(TRANSACTION_TIMESTAMP.clone()))
.expect("collector can be registered");
REGISTRY
.register(Box::new(TRANSACTION_RATE.clone()))
.expect("collector can be registered");
REGISTRY
.register(Box::new(FORESTER_SOL_BALANCE.clone()))
.expect("collector can be registered");
});
}
pub fn update_last_run_timestamp() {
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("Time went backwards")
.as_secs() as i64;
LAST_RUN_TIMESTAMP.set(now);
}
pub fn update_transactions_processed(epoch: u64, count: usize, duration: std::time::Duration) {
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("Time went backwards")
.as_secs_f64();
TRANSACTIONS_PROCESSED
.with_label_values(&[&epoch.to_string()])
.inc_by(count as u64);
TRANSACTION_TIMESTAMP
.with_label_values(&[&epoch.to_string()])
.set(now);
let rate = count as f64 / duration.as_secs_f64();
TRANSACTION_RATE
.with_label_values(&[&epoch.to_string()])
.set(rate);
debug!(
"Updated metrics for epoch {}: processed = {}, rate = {} tx/s",
epoch, count, rate
);
}
pub async fn queue_metric_update(epoch: u64, count: usize, duration: std::time::Duration) {
let mut updates = METRIC_UPDATES.lock().await;
updates.push((epoch, count, duration));
}
pub async fn process_queued_metrics() {
let mut updates = METRIC_UPDATES.lock().await;
for (epoch, count, duration) in updates.drain(..) {
update_transactions_processed(epoch, count, duration);
}
}
pub fn update_forester_sol_balance(pubkey: &str, balance: f64) {
FORESTER_SOL_BALANCE
.with_label_values(&[pubkey])
.set(balance);
debug!(
"Updated SOL balance for forester {}: {} SOL",
pubkey, balance
);
}
pub async fn push_metrics(url: &Option<String>) -> Result<()> {
let url = match url {
Some(url) => url,
None => {
debug!("Pushgateway URL not set, skipping metrics push");
return Ok(());
}
};
process_queued_metrics().await;
update_last_run_timestamp();
let encoder = TextEncoder::new();
let metric_families = REGISTRY.gather();
let mut buffer = Vec::new();
encoder.encode(&metric_families, &mut buffer)?;
debug!("Pushing metrics to Pushgateway");
let client = Client::new();
let res = client.post(url).body(buffer).send().await?;
if res.status().is_success() {
debug!("Successfully pushed metrics to Pushgateway");
Ok(())
} else {
let error_message = format!(
"Failed to push metrics. Status: {}, Body: {}",
res.status(),
res.text().await?
);
Err(error_message.into())
}
}
pub async fn metrics_handler() -> Result<impl warp::Reply> {
use prometheus::Encoder;
let encoder = TextEncoder::new();
let mut buffer = Vec::new();
if let Err(e) = encoder.encode(&REGISTRY.gather(), &mut buffer) {
error!("could not encode custom metrics: {}", e);
};
let mut res = String::from_utf8(buffer.clone()).unwrap_or_else(|e| {
error!("custom metrics could not be from_utf8'd: {}", e);
String::new()
});
buffer.clear();
let mut buffer = Vec::new();
if let Err(e) = encoder.encode(&prometheus::gather(), &mut buffer) {
error!("could not encode prometheus metrics: {}", e);
};
let res_prometheus = String::from_utf8(buffer.clone()).unwrap_or_else(|e| {
error!("prometheus metrics could not be from_utf8'd: {}", e);
String::new()
});
buffer.clear();
res.push_str(&res_prometheus);
Ok(res)
}
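// Illustrative usage sketch (assumption, not part of the original file): the
// intended flow after sending a batch of transactions — register collectors
// once, queue the datapoint from the async worker, then let push_metrics drain
// the queue and (if a Pushgateway URL is configured) POST the encoded registry.
#[allow(dead_code)]
async fn example_record_batch(epoch: u64, sent: usize, elapsed: std::time::Duration) -> Result<()> {
    register_metrics();
    queue_metric_update(epoch, sent, elapsed).await;
    // Passing None skips the HTTP push but still drains the queued updates.
    push_metrics(&None).await
}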
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/queue_helpers.rs
|
use crate::{errors::ForesterError, Result};
use account_compression::initialize_address_merkle_tree::Pubkey;
use account_compression::QueueAccount;
use light_client::rpc::RpcConnection;
use light_hash_set::HashSet;
use std::mem;
use tracing::debug;
#[derive(Debug, Clone)]
pub struct QueueItemData {
pub hash: [u8; 32],
pub index: usize,
}
pub async fn fetch_queue_item_data<R: RpcConnection>(
rpc: &mut R,
queue_pubkey: &Pubkey,
start_index: u16,
processing_length: u16,
queue_length: u16,
) -> Result<Vec<QueueItemData>> {
debug!("Fetching queue data for {:?}", queue_pubkey);
let mut account = rpc
.get_account(*queue_pubkey)
.await?
.ok_or_else(|| ForesterError::Custom("Queue account not found".to_string()))?;
let queue: HashSet = unsafe {
HashSet::from_bytes_copy(&mut account.data[8 + mem::size_of::<QueueAccount>()..])?
};
let end_index = (start_index + processing_length).min(queue_length);
let filtered_queue = queue
.iter()
.filter(|(index, cell)| {
*index >= start_index as usize
&& *index < end_index as usize
&& cell.sequence_number.is_none()
})
.map(|(index, cell)| QueueItemData {
hash: cell.value_bytes(),
index,
})
.collect();
debug!("Queue data fetched: {:?}", filtered_queue);
Ok(filtered_queue)
}
#[derive(Debug)]
pub struct QueueUpdate {
pub pubkey: Pubkey,
pub slot: u64,
}
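// Illustrative usage sketch (assumption, not part of the original file): scan a
// state nullifier queue from the beginning. Using the same value for the
// processing length and the queue length covers the whole queue, as the status
// command does; the constant comes from the account-compression crate.
#[allow(dead_code)]
async fn example_fetch_full_state_queue<R: RpcConnection>(
    rpc: &mut R,
    queue_pubkey: &Pubkey,
) -> Result<Vec<QueueItemData>> {
    use account_compression::utils::constants::STATE_NULLIFIER_QUEUE_VALUES;
    fetch_queue_item_data(
        rpc,
        queue_pubkey,
        0,
        STATE_NULLIFIER_QUEUE_VALUES,
        STATE_NULLIFIER_QUEUE_VALUES,
    )
    .await
}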
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/pagerduty.rs
|
use reqwest::Client;
use serde::Serialize;
use std::time::Duration;
#[derive(Debug, Serialize)]
struct PagerDutyPayload {
routing_key: String,
event_action: String,
payload: PagerDutyAlertPayload,
}
#[derive(Debug, Serialize)]
struct PagerDutyAlertPayload {
summary: String,
severity: String,
source: String,
}
pub async fn send_pagerduty_alert(
routing_key: &str,
summary: &str,
severity: &str,
source: &str,
) -> Result<(), Box<dyn std::error::Error>> {
let client = Client::builder().timeout(Duration::from_secs(10)).build()?;
let payload = PagerDutyPayload {
routing_key: routing_key.to_string(),
event_action: "trigger".to_string(),
payload: PagerDutyAlertPayload {
summary: summary.to_string(),
severity: severity.to_string(),
source: source.to_string(),
},
};
let response = client
.post("https://events.pagerduty.com/v2/enqueue")
.json(&payload)
.send()
.await?;
if !response.status().is_success() {
return Err(format!(
"Failed to send PagerDuty alert. Status: {}, Body: {}",
response.status(),
response.text().await?
)
.into());
}
Ok(())
}
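// Illustrative usage sketch (assumption, not part of the original file): how a
// caller might raise a critical alert. The routing key, summary, and source
// strings are placeholders; the epoch manager builds these from its config.
#[allow(dead_code)]
async fn example_alert_on_registration_failure() -> Result<(), Box<dyn std::error::Error>> {
    send_pagerduty_alert(
        "REPLACE_WITH_ROUTING_KEY",
        "Forester failed to register for epoch 42",
        "critical",
        "forester-mainnet-1",
    )
    .await
}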
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/errors.rs
|
use account_compression::initialize_address_merkle_tree::Error as AccountCompressionError;
use config::ConfigError;
use forester_utils::indexer::IndexerError;
use light_client::rpc::errors::RpcError;
use light_client::rpc_pool::PoolError;
use light_hash_set::HashSetError;
use photon_api::apis::{default_api::GetCompressedAccountProofPostError, Error as PhotonApiError};
use prometheus::Error as PrometheusError;
use reqwest::Error as ReqwestError;
use solana_client::pubsub_client::PubsubClientError;
use thiserror::Error;
use tokio::sync::mpsc::error::SendError;
use tokio::sync::oneshot::error::RecvError;
use tokio::task::JoinError;
#[derive(Error, Debug)]
pub enum ForesterError {
#[error("Element is not eligible for foresting")]
NotEligible,
#[error("RPC Error: {0}")]
RpcError(#[from] RpcError),
#[error("failed to deserialize account data")]
DeserializeError(#[from] solana_sdk::program_error::ProgramError),
#[error("failed to copy merkle tree")]
CopyMerkleTreeError(#[from] std::io::Error),
#[error(transparent)]
AccountCompressionError(#[from] AccountCompressionError),
#[error(transparent)]
HashSetError(#[from] HashSetError),
#[error(transparent)]
PhotonApiError(PhotonApiErrorWrapper),
#[error("bincode error")]
BincodeError(#[from] Box<bincode::ErrorKind>),
#[error("Indexer can't find any proofs")]
NoProofsFound,
#[error("Max retries reached")]
MaxRetriesReached,
#[error("error: {0:?}")]
SendError(String),
#[error("error: {0:?}")]
IndexerError(String),
#[error("Recv error: {0}")]
RecvError(#[from] RecvError),
#[error("error: {0:?}")]
JoinError(String),
#[error("Solana pubsub client error: {0}")]
PubsubClientError(#[from] PubsubClientError),
#[error("Channel disconnected")]
ChannelDisconnected,
#[error("Subscription timeout")]
SubscriptionTimeout,
#[error("Unexpected message: {0}")]
UnexpectedMessage(String),
#[error("Config error: {0:?}")]
ConfigError(String),
#[error("error: {0:?}")]
PrometheusError(PrometheusError),
#[error("error: {0:?}")]
ReqwestError(ReqwestError),
#[error("error: {0:?}")]
Custom(String),
#[error("unknown error")]
Unknown,
#[error("ForesterEpochPda not found")]
ForesterEpochPdaNotFound,
}
#[derive(Error, Debug)]
pub enum PhotonApiErrorWrapper {
#[error(transparent)]
GetCompressedAccountProofPostError(#[from] PhotonApiError<GetCompressedAccountProofPostError>),
}
impl From<PhotonApiError<GetCompressedAccountProofPostError>> for ForesterError {
fn from(err: PhotonApiError<GetCompressedAccountProofPostError>) -> Self {
ForesterError::PhotonApiError(PhotonApiErrorWrapper::GetCompressedAccountProofPostError(
err,
))
}
}
impl From<IndexerError> for ForesterError {
fn from(err: IndexerError) -> Self {
ForesterError::IndexerError(err.to_string())
}
}
impl<T> From<SendError<T>> for ForesterError {
fn from(err: SendError<T>) -> Self {
ForesterError::SendError(err.to_string())
}
}
impl From<JoinError> for ForesterError {
fn from(err: JoinError) -> Self {
ForesterError::JoinError(err.to_string())
}
}
impl From<PoolError> for ForesterError {
fn from(err: PoolError) -> Self {
ForesterError::Custom(err.to_string())
}
}
impl From<ConfigError> for ForesterError {
fn from(err: ConfigError) -> Self {
ForesterError::Custom(err.to_string())
}
}
impl From<PrometheusError> for ForesterError {
fn from(err: PrometheusError) -> ForesterError {
ForesterError::PrometheusError(err)
}
}
impl From<ReqwestError> for ForesterError {
fn from(err: ReqwestError) -> ForesterError {
ForesterError::ReqwestError(err)
}
}
impl From<String> for ForesterError {
fn from(err: String) -> ForesterError {
ForesterError::Custom(err)
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/send_transaction.rs
|
use crate::config::QueueConfig;
use crate::epoch_manager::{MerkleProofType, WorkItem};
use crate::errors::ForesterError;
use crate::queue_helpers::fetch_queue_item_data;
use crate::Result;
use account_compression::utils::constants::{
ADDRESS_MERKLE_TREE_CHANGELOG, ADDRESS_MERKLE_TREE_INDEXED_CHANGELOG, ADDRESS_QUEUE_VALUES,
STATE_MERKLE_TREE_CHANGELOG, STATE_NULLIFIER_QUEUE_VALUES,
};
use async_trait::async_trait;
use forester_utils::forester_epoch::{TreeAccounts, TreeType};
use forester_utils::indexer::Indexer;
use futures::future::join_all;
use light_client::rpc::{RetryConfig, RpcConnection};
use light_client::rpc_pool::SolanaRpcPool;
use light_registry::account_compression_cpi::sdk::{
create_nullify_instruction, create_update_address_merkle_tree_instruction,
CreateNullifyInstructionInputs, UpdateAddressMerkleTreeInstructionInputs,
};
use solana_sdk::compute_budget::ComputeBudgetInstruction;
use solana_sdk::instruction::Instruction;
use solana_sdk::pubkey::Pubkey;
use solana_sdk::transaction::Transaction;
use solana_sdk::{
hash::Hash,
signature::{Keypair, Signer},
};
use std::sync::Arc;
use std::time::Duration;
use std::vec;
use tokio::join;
use tokio::sync::Mutex;
use tokio::time::{sleep, Instant};
use tracing::{debug, warn};
#[async_trait]
pub trait TransactionBuilder {
async fn build_signed_transaction_batch(
&self,
payer: &Keypair,
derivation: &Pubkey,
recent_blockhash: &Hash,
work_items: &[WorkItem],
config: BuildTransactionBatchConfig,
) -> Result<Vec<Transaction>>;
}
// We're assuming that:
// 1. Helius slot latency is ~ 3 slots.
// See also: https://p.us5.datadoghq.com/sb/339e0590-c5d4-11ed-9c7b-da7ad0900005-231a672007c47d70f38e8fa321bc8407?fromUser=false&refresh_mode=sliding&tpl_var_leader_name%5B0%5D=%2A&from_ts=1725348612900&to_ts=1725953412900&live=true
// 2. Latency between forester server and helius is ~ 1 slot.
// 3. Slot duration is 500ms.
const LATENCY: Duration = Duration::from_millis(4 * 500);
const TIMEOUT_CHECK_ENABLED: bool = false;
/// Setting:
/// 1. We have one light slot (~15 seconds) and many elements in the queue.
/// 2. We want to send as many elements from the queue as possible.
///
/// Strategy:
/// 1. Execute transaction batches until max number of batches is
/// reached or light slot ended (global timeout).
/// 2. Fetch queue items.
/// 3. If work items is empty, await minimum batch time.
/// 4. Fetch recent blockhash.
/// 5. Iterate over work items in chunks of batch size.
/// 6. Check if we reached the end of the light slot.
/// 7. Asynchronously send all transactions in the batch
/// 8. Await minimum batch time.
/// 9. Check if we reached max number of batches.
///
/// Questions:
/// - How do we make sure that we have sent all the transactions?
/// - How can we monitor how many txs have been dropped?
///
/// TODO:
/// - return number of sent transactions
/// - test timeout for any action of this function or its subfunctions; the
///   timeout is the end of the slot
/// - consider dynamic batch size based on the number of transactions in the
/// queue
pub async fn send_batched_transactions<T: TransactionBuilder, R: RpcConnection>(
payer: &Keypair,
derivation: &Pubkey,
pool: Arc<SolanaRpcPool<R>>,
config: &SendBatchedTransactionsConfig,
tree_accounts: TreeAccounts,
transaction_builder: &T,
) -> Result<usize> {
let start_time = Instant::now();
let mut rpc = pool.get_connection().await?;
let mut num_batches = 0;
let mut num_sent_transactions: usize = 0;
// 1. Execute batches until max number of batches is reached or light slot
// ended (light_slot_duration)
while num_batches < config.num_batches && (start_time.elapsed() < config.retry_config.timeout) {
debug!("Sending batch: {}", num_batches);
// 2. Fetch queue items.
let queue_length = if tree_accounts.tree_type == TreeType::State {
STATE_NULLIFIER_QUEUE_VALUES
} else {
ADDRESS_QUEUE_VALUES
};
let start_index = if tree_accounts.tree_type == TreeType::State {
config.queue_config.state_queue_start_index
} else {
config.queue_config.address_queue_start_index
};
let length = if tree_accounts.tree_type == TreeType::State {
config.queue_config.state_queue_length
} else {
config.queue_config.address_queue_length
};
let queue_item_data = fetch_queue_item_data(
&mut *rpc,
&tree_accounts.queue,
start_index,
length,
queue_length,
)
.await?;
let work_items: Vec<WorkItem> = queue_item_data
.into_iter()
.map(|data| WorkItem {
tree_account: tree_accounts,
queue_item_data: data,
})
.collect();
// 3. If the work item list is empty, await the minimum batch time.
// If this is triggered we could switch to subscribing to the queue
if work_items.is_empty() {
debug!("No work items found, waiting for next batch");
sleep(config.retry_config.retry_delay).await;
continue;
}
// 4. Fetch recent blockhash.
// A recent blockhash is valid for ~2 minutes, so we only need one per batch. We
// use a fresh one per batch so that, if we retry these same transactions,
// they are not identical to earlier ones (identical transactions might be dropped).
let recent_blockhash = rpc.get_latest_blockhash().await?;
// 5. Iterate over work items in chunks of batch size.
for work_items in
work_items.chunks(config.build_transaction_batch_config.batch_size as usize)
{
// 6. Check if we reached the end of the light slot.
if TIMEOUT_CHECK_ENABLED {
let remaining_time = match config
.retry_config
.timeout
.checked_sub(start_time.elapsed())
{
Some(time) => time,
None => {
debug!("Reached end of light slot");
break;
}
};
if remaining_time < LATENCY {
debug!("Reached end of light slot");
break;
}
}
// Minimum time to wait for the next batch of transactions.
// Can be used to avoid rate limits.
let transaction_build_time_start = Instant::now();
let transactions: Vec<Transaction> = transaction_builder
.build_signed_transaction_batch(
payer,
derivation,
&recent_blockhash,
work_items,
config.build_transaction_batch_config,
)
.await?;
debug!(
"build transaction time {:?}",
transaction_build_time_start.elapsed()
);
let batch_start = Instant::now();
if TIMEOUT_CHECK_ENABLED {
let remaining_time = config
.retry_config
.timeout
.saturating_sub(start_time.elapsed());
if remaining_time < LATENCY {
debug!("Reached end of light slot");
break;
}
}
// Asynchronously send all transactions in the batch
let pool_clone = Arc::clone(&pool);
let send_futures = transactions.into_iter().map(move |tx| {
let pool_clone = Arc::clone(&pool_clone);
tokio::spawn(async move {
match pool_clone.get_connection().await {
Ok(mut rpc) => {
let result = rpc.process_transaction(tx).await;
println!("tx result: {:?}", result);
result
}
Err(e) => Err(light_client::rpc::RpcError::CustomError(format!(
"Failed to get RPC connection: {}",
e
))),
}
})
});
let results = join_all(send_futures).await;
// Process results
for result in results {
match result {
Ok(Ok(_)) => num_sent_transactions += 1,
Ok(Err(e)) => warn!("Transaction failed: {:?}", e),
Err(e) => warn!("Task failed: {:?}", e),
}
}
num_batches += 1;
let batch_duration = batch_start.elapsed();
debug!("Batch duration: {:?}", batch_duration);
// 8. Await minimum batch time.
if start_time.elapsed() + config.retry_config.retry_delay < config.retry_config.timeout
{
sleep(config.retry_config.retry_delay).await;
} else {
break;
}
// 9. Check if we reached max number of batches.
if num_batches >= config.num_batches {
debug!("Reached max number of batches");
break;
}
}
}
debug!("Sent {} transactions", num_sent_transactions);
Ok(num_sent_transactions)
}
#[derive(Debug, Clone, Copy)]
pub struct SendBatchedTransactionsConfig {
pub num_batches: u64,
pub build_transaction_batch_config: BuildTransactionBatchConfig,
pub queue_config: QueueConfig,
pub retry_config: RetryConfig,
pub light_slot_length: u64,
}
#[derive(Debug, Clone, Copy)]
pub struct BuildTransactionBatchConfig {
pub batch_size: u64,
pub compute_unit_price: Option<u64>,
pub compute_unit_limit: Option<u32>,
}
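// Illustrative sketch (assumption, not part of the original file): a config
// comparable to the one the epoch manager assembles — ten batches of fifty
// transactions with an explicit compute-unit limit. The queue, retry, and
// light-slot-length values come from the caller's configuration in the real
// pipeline; nothing here is a recommended production setting.
#[allow(dead_code)]
fn example_send_config(
    queue_config: QueueConfig,
    retry_config: RetryConfig,
    light_slot_length: u64,
) -> SendBatchedTransactionsConfig {
    SendBatchedTransactionsConfig {
        num_batches: 10,
        build_transaction_batch_config: BuildTransactionBatchConfig {
            batch_size: 50,
            compute_unit_price: None,
            compute_unit_limit: Some(1_000_000),
        },
        queue_config,
        retry_config,
        light_slot_length,
    }
}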
pub struct EpochManagerTransactions<R: RpcConnection, I: Indexer<R>> {
pub indexer: Arc<Mutex<I>>,
pub epoch: u64,
pub phantom: std::marker::PhantomData<R>,
}
#[async_trait]
impl<R: RpcConnection, I: Indexer<R>> TransactionBuilder for EpochManagerTransactions<R, I> {
async fn build_signed_transaction_batch(
&self,
payer: &Keypair,
derivation: &Pubkey,
recent_blockhash: &Hash,
work_items: &[WorkItem],
config: BuildTransactionBatchConfig,
) -> Result<Vec<Transaction>> {
let mut transactions = vec![];
let (_, all_instructions) = fetch_proofs_and_create_instructions(
payer.pubkey(),
*derivation,
self.indexer.clone(),
self.epoch,
work_items,
)
.await?;
for instruction in all_instructions {
let transaction = build_signed_transaction(
payer,
recent_blockhash,
config.compute_unit_price,
config.compute_unit_limit,
instruction,
)
.await;
transactions.push(transaction);
}
Ok(transactions)
}
}
async fn build_signed_transaction(
payer: &Keypair,
recent_blockhash: &Hash,
compute_unit_price: Option<u64>,
compute_unit_limit: Option<u32>,
instruction: Instruction,
) -> Transaction {
let mut instructions: Vec<Instruction> = if let Some(price) = compute_unit_price {
vec![ComputeBudgetInstruction::set_compute_unit_price(price)]
} else {
vec![]
};
if let Some(limit) = compute_unit_limit {
instructions.push(ComputeBudgetInstruction::set_compute_unit_limit(limit));
}
instructions.push(instruction);
let mut transaction =
Transaction::new_with_payer(instructions.as_slice(), Some(&payer.pubkey()));
transaction.sign(&[payer], *recent_blockhash);
transaction
}
/// Work items should all belong to a single tree and tree type.
pub async fn fetch_proofs_and_create_instructions<R: RpcConnection, I: Indexer<R>>(
authority: Pubkey,
derivation: Pubkey,
indexer: Arc<Mutex<I>>,
epoch: u64,
work_items: &[WorkItem],
) -> Result<(Vec<MerkleProofType>, Vec<Instruction>)> {
let mut proofs = Vec::new();
let mut instructions = vec![];
let (address_items, state_items): (Vec<_>, Vec<_>) = work_items
.iter()
.partition(|item| matches!(item.tree_account.tree_type, TreeType::Address));
// Prepare data for batch fetching
let address_data = if !address_items.is_empty() {
let merkle_tree = address_items
.first()
.ok_or_else(|| ForesterError::Custom("No address items found".to_string()))?
.tree_account
.merkle_tree
.to_bytes();
let addresses: Vec<[u8; 32]> = address_items
.iter()
.map(|item| item.queue_item_data.hash)
.collect();
Some((merkle_tree, addresses))
} else {
None
};
let state_data = if !state_items.is_empty() {
let states: Vec<String> = state_items
.iter()
.map(|item| bs58::encode(&item.queue_item_data.hash).into_string())
.collect();
Some(states)
} else {
None
};
// Fetch all proofs in parallel
let (address_proofs, state_proofs) = {
let indexer = indexer.lock().await;
let address_future = async {
if let Some((merkle_tree, addresses)) = address_data {
indexer
.get_multiple_new_address_proofs(merkle_tree, addresses)
.await
} else {
Ok(vec![])
}
};
let state_future = async {
if let Some(states) = state_data {
indexer.get_multiple_compressed_account_proofs(states).await
} else {
Ok(vec![])
}
};
join!(address_future, state_future)
};
let address_proofs = address_proofs?;
let state_proofs = state_proofs?;
// Process address proofs and create instructions
for (item, proof) in address_items.iter().zip(address_proofs.into_iter()) {
proofs.push(MerkleProofType::AddressProof(proof.clone()));
let instruction = create_update_address_merkle_tree_instruction(
UpdateAddressMerkleTreeInstructionInputs {
authority,
derivation,
address_merkle_tree: item.tree_account.merkle_tree,
address_queue: item.tree_account.queue,
value: item.queue_item_data.index as u16,
low_address_index: proof.low_address_index,
low_address_value: proof.low_address_value,
low_address_next_index: proof.low_address_next_index,
low_address_next_value: proof.low_address_next_value,
low_address_proof: proof.low_address_proof,
changelog_index: (proof.root_seq % ADDRESS_MERKLE_TREE_CHANGELOG) as u16,
indexed_changelog_index: (proof.root_seq % ADDRESS_MERKLE_TREE_INDEXED_CHANGELOG)
as u16,
is_metadata_forester: false,
},
epoch,
);
instructions.push(instruction);
}
// Process state proofs and create instructions
for (item, proof) in state_items.iter().zip(state_proofs.into_iter()) {
proofs.push(MerkleProofType::StateProof(proof.clone()));
let instruction = create_nullify_instruction(
CreateNullifyInstructionInputs {
nullifier_queue: item.tree_account.queue,
merkle_tree: item.tree_account.merkle_tree,
change_log_indices: vec![proof.root_seq % STATE_MERKLE_TREE_CHANGELOG],
leaves_queue_indices: vec![item.queue_item_data.index as u16],
indices: vec![proof.leaf_index],
proofs: vec![proof.proof.clone()],
authority,
derivation,
is_metadata_forester: false,
},
epoch,
);
instructions.push(instruction);
}
Ok((proofs, instructions))
}
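// Illustrative sketch (assumption, not part of the original file): assembling a
// single prioritized transaction the same way the batch builder does — the
// optional compute-unit price instruction first, then the limit, then the work
// instruction. The price value is a placeholder; the limit matches the one used
// by the epoch manager.
#[allow(dead_code)]
async fn example_build_prioritized_transaction(
    payer: &Keypair,
    recent_blockhash: &Hash,
    work_instruction: Instruction,
) -> Transaction {
    build_signed_transaction(
        payer,
        recent_blockhash,
        Some(10_000),
        Some(1_000_000),
        work_instruction,
    )
    .await
}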
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/main.rs
|
use clap::Parser;
use forester::cli::{Cli, Commands};
use forester::errors::ForesterError;
use forester::metrics::register_metrics;
use forester::photon_indexer::PhotonIndexer;
use forester::telemetry::setup_telemetry;
use forester::{forester_status, run_pipeline, ForesterConfig};
use light_client::rpc::{RpcConnection, SolanaRpcConnection};
use std::sync::Arc;
use tokio::signal::ctrl_c;
use tokio::sync::{mpsc, oneshot};
use tracing::debug;
#[tokio::main]
async fn main() -> Result<(), ForesterError> {
setup_telemetry();
let cli = Cli::parse();
match &cli.command {
Commands::Start(args) => {
let config = Arc::new(ForesterConfig::new_for_start(args)?);
if config.general_config.enable_metrics {
register_metrics();
}
let (shutdown_sender, shutdown_receiver) = oneshot::channel();
let (work_report_sender, mut work_report_receiver) = mpsc::channel(100);
tokio::spawn(async move {
ctrl_c().await.expect("Failed to listen for Ctrl+C");
shutdown_sender
.send(())
.expect("Failed to send shutdown signal");
});
tokio::spawn(async move {
while let Some(report) = work_report_receiver.recv().await {
debug!("Work Report: {:?}", report);
}
});
let indexer_rpc =
SolanaRpcConnection::new(config.external_services.rpc_url.clone(), None);
let indexer = Arc::new(tokio::sync::Mutex::new(PhotonIndexer::new(
config.external_services.indexer_url.clone().unwrap(),
config.external_services.photon_api_key.clone(),
indexer_rpc,
)));
run_pipeline(config, indexer, shutdown_receiver, work_report_sender).await?
}
Commands::Status(args) => {
forester_status::fetch_forester_status(args).await;
}
}
Ok(())
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/tree_finder.rs
|
use crate::tree_data_sync::fetch_trees;
use crate::Result;
use forester_utils::forester_epoch::TreeAccounts;
use light_client::rpc::RpcConnection;
use light_client::rpc_pool::SolanaRpcPool;
use std::sync::Arc;
use tokio::sync::broadcast;
use tokio::time::{interval, Duration};
use tracing::{debug, error, info};
pub struct TreeFinder<R: RpcConnection> {
rpc_pool: Arc<SolanaRpcPool<R>>,
known_trees: Vec<TreeAccounts>,
new_tree_sender: broadcast::Sender<TreeAccounts>,
check_interval: Duration,
}
impl<R: RpcConnection> TreeFinder<R> {
pub fn new(
rpc_pool: Arc<SolanaRpcPool<R>>,
initial_trees: Vec<TreeAccounts>,
new_tree_sender: broadcast::Sender<TreeAccounts>,
check_interval: Duration,
) -> Self {
Self {
rpc_pool,
known_trees: initial_trees,
new_tree_sender,
check_interval,
}
}
pub async fn run(&mut self) -> Result<()> {
let mut interval = interval(self.check_interval);
loop {
interval.tick().await;
debug!("Checking for new trees");
match self.check_for_new_trees().await {
Ok(new_trees) => {
for tree in new_trees {
if let Err(e) = self.new_tree_sender.send(tree) {
error!("Failed to send new tree: {:?}", e);
} else {
info!("New tree discovered and sent: {:?}", tree);
self.known_trees.push(tree);
}
}
}
Err(e) => {
error!("Error checking for new trees: {:?}", e);
}
}
tokio::task::yield_now().await;
}
}
async fn check_for_new_trees(&self) -> Result<Vec<TreeAccounts>> {
let rpc = self.rpc_pool.get_connection().await?;
let current_trees = fetch_trees(&*rpc).await?;
let new_trees: Vec<TreeAccounts> = current_trees
.into_iter()
.filter(|tree| !self.known_trees.contains(tree))
.collect();
Ok(new_trees)
}
}
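// Illustrative usage sketch (assumption, not part of the original file):
// spawning the finder on a 60-second poll interval with a broadcast channel the
// epoch manager can subscribe to. The channel capacity and interval are
// placeholders.
#[allow(dead_code)]
fn example_spawn_tree_finder<R: RpcConnection + 'static>(
    rpc_pool: Arc<SolanaRpcPool<R>>,
    initial_trees: Vec<TreeAccounts>,
) -> broadcast::Receiver<TreeAccounts> {
    let (new_tree_sender, new_tree_receiver) = broadcast::channel(32);
    let mut finder = TreeFinder::new(
        rpc_pool,
        initial_trees,
        new_tree_sender,
        Duration::from_secs(60),
    );
    tokio::spawn(async move {
        if let Err(e) = finder.run().await {
            error!("Tree finder stopped: {:?}", e);
        }
    });
    new_tree_receiver
}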
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/tree_data_sync.rs
|
use crate::Result;
use account_compression::utils::check_discrimininator::check_discriminator;
use account_compression::{AddressMerkleTreeAccount, MerkleTreeMetadata, StateMerkleTreeAccount};
use borsh::BorshDeserialize;
use forester_utils::forester_epoch::{TreeAccounts, TreeType};
use light_client::rpc::RpcConnection;
use solana_sdk::account::Account;
use solana_sdk::pubkey::Pubkey;
use tracing::debug;
pub async fn fetch_trees<R: RpcConnection>(rpc: &R) -> Result<Vec<TreeAccounts>> {
let program_id = account_compression::id();
debug!("Fetching accounts for program: {}", program_id);
Ok(rpc
.get_program_accounts(&program_id)
.await?
.into_iter()
.filter_map(|(pubkey, account)| process_account(pubkey, account))
.collect())
}
fn process_account(pubkey: Pubkey, account: Account) -> Option<TreeAccounts> {
process_state_account(&account, pubkey)
.or_else(|_| process_address_account(&account, pubkey))
.ok()
}
fn process_state_account(account: &Account, pubkey: Pubkey) -> Result<TreeAccounts> {
check_discriminator::<StateMerkleTreeAccount>(&account.data)?;
let tree_account = StateMerkleTreeAccount::deserialize(&mut &account.data[8..])?;
Ok(create_tree_accounts(
pubkey,
&tree_account.metadata,
TreeType::State,
))
}
fn process_address_account(account: &Account, pubkey: Pubkey) -> Result<TreeAccounts> {
check_discriminator::<AddressMerkleTreeAccount>(&account.data)?;
let tree_account = AddressMerkleTreeAccount::deserialize(&mut &account.data[8..])?;
Ok(create_tree_accounts(
pubkey,
&tree_account.metadata,
TreeType::Address,
))
}
fn create_tree_accounts(
pubkey: Pubkey,
metadata: &MerkleTreeMetadata,
tree_type: TreeType,
) -> TreeAccounts {
let tree_accounts = TreeAccounts::new(
pubkey,
metadata.associated_queue,
tree_type,
metadata.rollover_metadata.rolledover_slot != u64::MAX,
);
debug!(
"{:?} Merkle Tree account found. Pubkey: {}. Queue pubkey: {}",
tree_type, pubkey, tree_accounts.queue
);
tree_accounts
}
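// Illustrative usage sketch (assumption, not part of the original file): listing
// every discovered tree and its associated queue, which is how the tree finder
// and the status command consume fetch_trees.
#[allow(dead_code)]
async fn example_list_trees<R: RpcConnection>(rpc: &R) -> Result<()> {
    for tree in fetch_trees(rpc).await? {
        debug!(
            "{:?} tree {} -> queue {}",
            tree.tree_type, tree.merkle_tree, tree.queue
        );
    }
    Ok(())
}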
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/utils.rs
|
use light_client::rpc::RpcConnection;
use light_registry::protocol_config::state::{ProtocolConfig, ProtocolConfigPda};
use light_registry::utils::get_protocol_config_pda_address;
use std::time::{SystemTime, UNIX_EPOCH};
use tracing::debug;
pub fn decode_hash(account: &str) -> [u8; 32] {
let bytes = bs58::decode(account).into_vec().unwrap();
let mut arr = [0u8; 32];
arr.copy_from_slice(&bytes);
arr
}
pub async fn get_protocol_config<R: RpcConnection>(rpc: &mut R) -> ProtocolConfig {
let authority_pda = get_protocol_config_pda_address();
let protocol_config_account = rpc
.get_anchor_account::<ProtocolConfigPda>(&authority_pda.0)
.await
.unwrap()
.unwrap();
debug!("Protocol config account: {:?}", protocol_config_account);
protocol_config_account.config
}
pub fn get_current_system_time_ms() -> u128 {
SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("Time went backwards")
.as_millis()
}
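// Illustrative sketch (assumption, not part of the original file): the all-ones
// base58 string decodes to 32 zero bytes. Note that decode_hash panics if its
// input does not decode to exactly 32 bytes.
#[allow(dead_code)]
fn example_decode_hash() -> [u8; 32] {
    decode_hash("11111111111111111111111111111111")
}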
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/cli.rs
|
use clap::{Parser, Subcommand};
#[derive(Parser)]
#[clap(author, version, about, long_about = None)]
pub struct Cli {
#[command(subcommand)]
pub command: Commands,
}
#[allow(clippy::large_enum_variant)]
#[derive(Subcommand)]
pub enum Commands {
Start(StartArgs),
Status(StatusArgs),
}
#[derive(Parser, Clone, Debug)]
pub struct StartArgs {
#[arg(long, env = "FORESTER_RPC_URL")]
pub rpc_url: Option<String>,
#[arg(long, env = "FORESTER_PUSH_GATEWAY_URL")]
pub push_gateway_url: Option<String>,
#[arg(long, env = "FORESTER_PAGERDUTY_ROUTING_KEY")]
pub pagerduty_routing_key: Option<String>,
#[arg(long, env = "FORESTER_WS_RPC_URL")]
pub ws_rpc_url: Option<String>,
#[arg(long, env = "FORESTER_INDEXER_URL")]
pub indexer_url: Option<String>,
#[arg(long, env = "FORESTER_PROVER_URL")]
pub prover_url: Option<String>,
#[arg(long, env = "FORESTER_PAYER")]
pub payer: Option<String>,
#[arg(long, env = "FORESTER_DERIVATION_PUBKEY")]
pub derivation: Option<String>,
#[arg(long, env = "FORESTER_PHOTON_API_KEY")]
pub photon_api_key: Option<String>,
#[arg(long, env = "FORESTER_INDEXER_BATCH_SIZE", default_value = "50")]
pub indexer_batch_size: usize,
#[arg(
long,
env = "FORESTER_INDEXER_MAX_CONCURRENT_BATCHES",
default_value = "10"
)]
pub indexer_max_concurrent_batches: usize,
#[arg(long, env = "FORESTER_TRANSACTION_BATCH_SIZE", default_value = "1")]
pub transaction_batch_size: usize,
#[arg(
long,
env = "FORESTER_TRANSACTION_MAX_CONCURRENT_BATCHES",
default_value = "20"
)]
pub transaction_max_concurrent_batches: usize,
#[arg(long, env = "FORESTER_CU_LIMIT", default_value = "1000000")]
pub cu_limit: u32,
#[arg(long, env = "FORESTER_RPC_POOL_SIZE", default_value = "20")]
pub rpc_pool_size: usize,
#[arg(
long,
env = "FORESTER_SLOT_UPDATE_INTERVAL_SECONDS",
default_value = "10"
)]
pub slot_update_interval_seconds: u64,
#[arg(
long,
env = "FORESTER_TREE_DISCOVERY_INTERVAL_SECONDS",
default_value = "5"
)]
pub tree_discovery_interval_seconds: u64,
#[arg(long, env = "FORESTER_MAX_RETRIES", default_value = "3")]
pub max_retries: u32,
#[arg(long, env = "FORESTER_RETRY_DELAY", default_value = "1000")]
pub retry_delay: u64,
#[arg(long, env = "FORESTER_RETRY_TIMEOUT", default_value = "30000")]
pub retry_timeout: u64,
#[arg(long, env = "FORESTER_STATE_QUEUE_START_INDEX", default_value = "0")]
pub state_queue_start_index: u16,
#[arg(
long,
env = "FORESTER_STATE_PROCESSING_LENGTH",
default_value = "28807"
)]
pub state_queue_processing_length: u16,
#[arg(long, env = "FORESTER_ADDRESS_QUEUE_START_INDEX", default_value = "0")]
pub address_queue_start_index: u16,
#[arg(
long,
env = "FORESTER_ADDRESS_PROCESSING_LENGTH",
default_value = "28807"
)]
pub address_queue_processing_length: u16,
}
#[derive(Parser, Clone, Debug)]
pub struct StatusArgs {
#[arg(long, env = "FORESTER_RPC_URL")]
pub rpc_url: String,
#[arg(long, env = "FORESTER_PUSH_GATEWAY_URL")]
pub push_gateway_url: Option<String>,
#[arg(long, env = "FORESTER_PAGERDUTY_ROUTING_KEY")]
pub pagerduty_routing_key: Option<String>,
/// Run the full status check.
#[clap(long)]
pub full: bool,
#[clap(long)]
pub protocol_config: bool,
#[clap(long, default_value_t = true)]
pub queue: bool,
}
impl StartArgs {
pub fn enable_metrics(&self) -> bool {
self.push_gateway_url.is_some()
}
}
impl StatusArgs {
pub fn enable_metrics(&self) -> bool {
self.push_gateway_url.is_some()
}
}
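// Illustrative sketch (assumption, not part of the original file): constructing
// the CLI programmatically, e.g. in a test. The URL is a placeholder value;
// clap derives the kebab-case flag names from the field names above.
#[allow(dead_code)]
fn example_parse_status_command() -> Cli {
    Cli::parse_from([
        "forester",
        "status",
        "--rpc-url",
        "https://api.devnet.solana.com",
    ])
}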
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/epoch_manager.rs
|
use crate::errors::ForesterError;
use crate::queue_helpers::QueueItemData;
use crate::rollover::{
is_tree_ready_for_rollover, rollover_address_merkle_tree, rollover_state_merkle_tree,
};
use crate::send_transaction::{
send_batched_transactions, BuildTransactionBatchConfig, EpochManagerTransactions,
SendBatchedTransactionsConfig,
};
use crate::slot_tracker::{slot_duration, wait_until_slot_reached, SlotTracker};
use crate::tree_data_sync::fetch_trees;
use crate::Result;
use crate::{ForesterConfig, ForesterEpochInfo};
use light_client::rpc_pool::SolanaRpcPool;
use crate::metrics::{push_metrics, queue_metric_update, update_forester_sol_balance};
use crate::pagerduty::send_pagerduty_alert;
use crate::tree_finder::TreeFinder;
use dashmap::DashMap;
use forester_utils::forester_epoch::{
get_epoch_phases, Epoch, TreeAccounts, TreeForesterSchedule, TreeType,
};
use forester_utils::indexer::{Indexer, MerkleProof, NewAddressProofWithContext};
use futures::future::join_all;
use light_client::rpc::{RetryConfig, RpcConnection, RpcError, SolanaRpcConnection};
use light_registry::errors::RegistryError;
use light_registry::protocol_config::state::ProtocolConfig;
use light_registry::sdk::{
create_finalize_registration_instruction, create_report_work_instruction,
};
use light_registry::utils::{get_epoch_pda_address, get_forester_epoch_pda_from_authority};
use light_registry::{EpochPda, ForesterEpochPda};
use solana_program::instruction::InstructionError;
use solana_program::pubkey::Pubkey;
use solana_sdk::signature::Signer;
use solana_sdk::transaction::TransactionError;
use std::collections::HashMap;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::broadcast::error::RecvError;
use tokio::sync::{broadcast, mpsc, oneshot, Mutex};
use tokio::task::JoinHandle;
use tokio::time::{sleep, Instant};
use tracing::{debug, error, info, info_span, instrument, warn};
#[derive(Clone, Debug)]
pub struct WorkReport {
pub epoch: u64,
pub processed_items: usize,
}
#[derive(Debug, Clone)]
pub struct WorkItem {
pub tree_account: TreeAccounts,
pub queue_item_data: QueueItemData,
}
impl WorkItem {
pub fn is_address_tree(&self) -> bool {
self.tree_account.tree_type == TreeType::Address
}
pub fn is_state_tree(&self) -> bool {
self.tree_account.tree_type == TreeType::State
}
}
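// Illustrative sketch (assumption, not part of the original file): splitting a
// mixed work list into address and state items, mirroring the partition the
// transaction builder performs before fetching proofs.
#[allow(dead_code)]
fn example_partition_work_items(items: Vec<WorkItem>) -> (Vec<WorkItem>, Vec<WorkItem>) {
    items.into_iter().partition(|item| item.is_address_tree())
}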
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone)]
pub enum MerkleProofType {
AddressProof(NewAddressProofWithContext),
StateProof(MerkleProof),
}
#[derive(Debug)]
pub struct EpochManager<R: RpcConnection, I: Indexer<R>> {
config: Arc<ForesterConfig>,
protocol_config: Arc<ProtocolConfig>,
rpc_pool: Arc<SolanaRpcPool<R>>,
indexer: Arc<Mutex<I>>,
work_report_sender: mpsc::Sender<WorkReport>,
processed_items_per_epoch_count: Arc<Mutex<HashMap<u64, AtomicUsize>>>,
trees: Arc<Mutex<Vec<TreeAccounts>>>,
slot_tracker: Arc<SlotTracker>,
processing_epochs: Arc<DashMap<u64, Arc<AtomicBool>>>,
new_tree_sender: broadcast::Sender<TreeAccounts>,
}
impl<R: RpcConnection, I: Indexer<R>> Clone for EpochManager<R, I> {
fn clone(&self) -> Self {
Self {
config: self.config.clone(),
protocol_config: self.protocol_config.clone(),
rpc_pool: self.rpc_pool.clone(),
indexer: self.indexer.clone(),
work_report_sender: self.work_report_sender.clone(),
processed_items_per_epoch_count: self.processed_items_per_epoch_count.clone(),
trees: self.trees.clone(),
slot_tracker: self.slot_tracker.clone(),
processing_epochs: self.processing_epochs.clone(),
new_tree_sender: self.new_tree_sender.clone(),
}
}
}
impl<R: RpcConnection, I: Indexer<R>> EpochManager<R, I> {
#[allow(clippy::too_many_arguments)]
pub async fn new(
config: Arc<ForesterConfig>,
protocol_config: Arc<ProtocolConfig>,
rpc_pool: Arc<SolanaRpcPool<R>>,
indexer: Arc<Mutex<I>>,
work_report_sender: mpsc::Sender<WorkReport>,
trees: Vec<TreeAccounts>,
slot_tracker: Arc<SlotTracker>,
new_tree_sender: broadcast::Sender<TreeAccounts>,
) -> Result<Self> {
Ok(Self {
config,
protocol_config,
rpc_pool,
indexer,
work_report_sender,
processed_items_per_epoch_count: Arc::new(Mutex::new(HashMap::new())),
trees: Arc::new(Mutex::new(trees)),
slot_tracker,
processing_epochs: Arc::new(DashMap::new()),
new_tree_sender,
})
}
pub async fn run(self: Arc<Self>) -> Result<()> {
let (tx, mut rx) = mpsc::channel(100);
let tx = Arc::new(tx);
let monitor_handle = tokio::spawn({
let self_clone = Arc::clone(&self);
let tx_clone = Arc::clone(&tx);
async move { self_clone.monitor_epochs(tx_clone).await }
});
// Process current and previous epochs
let current_previous_handle = tokio::spawn({
let self_clone = Arc::clone(&self);
let tx_clone = Arc::clone(&tx);
async move {
self_clone
.process_current_and_previous_epochs(tx_clone)
.await
}
});
let new_tree_handle = tokio::spawn({
let self_clone = Arc::clone(&self);
async move { self_clone.handle_new_trees().await }
});
let balance_check_handle = tokio::spawn({
let self_clone = Arc::clone(&self);
async move { self_clone.check_sol_balance_periodically().await }
});
while let Some(epoch) = rx.recv().await {
debug!("Received new epoch: {}", epoch);
let self_clone = Arc::clone(&self);
tokio::spawn(async move {
if let Err(e) = self_clone.process_epoch(epoch).await {
error!("Error processing epoch {}: {:?}", epoch, e);
}
});
}
monitor_handle.await??;
current_previous_handle.await??;
new_tree_handle.await??;
balance_check_handle.await??;
Ok(())
}
async fn check_sol_balance_periodically(self: Arc<Self>) -> Result<()> {
let interval = Duration::from_secs(60);
let mut interval_timer = tokio::time::interval(interval);
loop {
interval_timer.tick().await;
let mut rpc = self.rpc_pool.get_connection().await?;
let balance = rpc.get_balance(&self.config.payer_keypair.pubkey()).await?;
let balance_in_sol = balance as f64 / 1e9;
update_forester_sol_balance(
&self.config.payer_keypair.pubkey().to_string(),
balance_in_sol,
);
info!("Current SOL balance: {} SOL", balance_in_sol);
tokio::task::yield_now().await;
}
}
async fn handle_new_trees(self: Arc<Self>) -> Result<()> {
let mut receiver = self.new_tree_sender.subscribe();
loop {
match receiver.recv().await {
Ok(new_tree) => {
info!("Received new tree: {:?}", new_tree);
self.add_new_tree(new_tree).await?;
}
Err(e) => match e {
RecvError::Lagged(lag) => {
warn!("Lagged in receiving new trees: {:?}", lag);
}
RecvError::Closed => {
info!("New tree receiver closed");
break;
}
},
}
}
Ok(())
}
async fn add_new_tree(&self, new_tree: TreeAccounts) -> Result<()> {
info!("Adding new tree: {:?}", new_tree);
let mut trees = self.trees.lock().await;
trees.push(new_tree);
drop(trees);
info!("New tree added to the list of trees");
let (current_slot, current_epoch) = self.get_current_slot_and_epoch().await?;
let phases = get_epoch_phases(&self.protocol_config, current_epoch);
// Check if we're currently in the active phase
if current_slot >= phases.active.start && current_slot < phases.active.end {
info!("Currently in active phase. Attempting to process the new tree immediately.");
info!("Recovering registration info...");
if let Ok(mut epoch_info) = self.recover_registration_info(current_epoch).await {
info!("Recovered registration info for current epoch");
let tree_schedule = TreeForesterSchedule::new_with_schedule(
&new_tree,
current_slot,
&epoch_info.forester_epoch_pda,
&epoch_info.epoch_pda,
);
epoch_info.trees.push(tree_schedule.clone());
let self_clone = Arc::new(self.clone());
info!("Spawning task to process new tree in current epoch");
tokio::spawn(async move {
if let Err(e) = self_clone
.process_queue(
&epoch_info.epoch,
&epoch_info.forester_epoch_pda,
tree_schedule,
)
.await
{
error!("Error processing queue for new tree: {:?}", e);
} else {
info!("Successfully processed new tree in current epoch");
}
});
info!(
"Injected new tree into current epoch {}: {:?}",
current_epoch, new_tree
);
} else {
warn!("Failed to retrieve current epoch info for processing new tree");
}
} else {
info!("Not in active phase. New tree will be processed in the next active phase");
}
Ok(())
}
#[instrument(level = "debug", skip(self, tx))]
async fn monitor_epochs(&self, tx: Arc<mpsc::Sender<u64>>) -> Result<()> {
let mut last_epoch: Option<u64> = None;
debug!("Starting epoch monitor");
loop {
let (slot, current_epoch) = self.get_current_slot_and_epoch().await?;
debug!(
"last_epoch: {:?}, current_epoch: {:?}, slot: {:?}",
last_epoch, current_epoch, slot
);
if last_epoch.map_or(true, |last| current_epoch > last) {
debug!("New epoch detected: {}", current_epoch);
let phases = get_epoch_phases(&self.protocol_config, current_epoch);
if slot < phases.registration.end {
tx.send(current_epoch).await.map_err(|e| {
ForesterError::Custom(format!("Failed to send new epoch: {}", e))
})?;
last_epoch = Some(current_epoch);
}
}
let next_epoch = current_epoch + 1;
let next_phases = get_epoch_phases(&self.protocol_config, next_epoch);
let mut rpc = self.rpc_pool.get_connection().await?;
let slots_to_wait = next_phases.registration.start.saturating_sub(slot);
debug!(
"Waiting for epoch {} registration phase to start. Current slot: {}, Registration phase start slot: {}, Slots to wait: {}",
next_epoch, slot, next_phases.registration.start, slots_to_wait
);
if let Err(e) = wait_until_slot_reached(
&mut *rpc,
&self.slot_tracker,
next_phases.registration.start,
)
.await
{
error!("Error waiting for next registration phase: {:?}", e);
continue;
}
}
}
async fn get_processed_items_count(&self, epoch: u64) -> usize {
let counts = self.processed_items_per_epoch_count.lock().await;
counts
.get(&epoch)
.map_or(0, |count| count.load(Ordering::Relaxed))
}
async fn increment_processed_items_count(&self, epoch: u64, increment_by: usize) {
let mut counts = self.processed_items_per_epoch_count.lock().await;
counts
.entry(epoch)
.or_insert_with(|| AtomicUsize::new(0))
.fetch_add(increment_by, Ordering::Relaxed);
}
async fn recover_registration_info(&self, epoch: u64) -> Result<ForesterEpochInfo> {
debug!("Recovering registration info for epoch {}", epoch);
let forester_epoch_pda_pubkey =
get_forester_epoch_pda_from_authority(&self.config.derivation_pubkey, epoch).0;
let mut rpc = self.rpc_pool.get_connection().await?;
let existing_pda = rpc
.get_anchor_account::<ForesterEpochPda>(&forester_epoch_pda_pubkey)
.await?;
match existing_pda {
Some(pda) => {
self.recover_registration_info_internal(epoch, forester_epoch_pda_pubkey, pda)
.await
}
None => Err(ForesterError::ForesterEpochPdaNotFound),
}
}
#[instrument(level = "debug", skip(self))]
async fn process_current_and_previous_epochs(&self, tx: Arc<mpsc::Sender<u64>>) -> Result<()> {
let (slot, current_epoch) = self.get_current_slot_and_epoch().await?;
let current_phases = get_epoch_phases(&self.protocol_config, current_epoch);
let previous_epoch = current_epoch.saturating_sub(1);
// Process previous epoch if still in active or later phase
if slot > current_phases.registration.start {
debug!("Processing previous epoch: {}", previous_epoch);
tx.send(previous_epoch).await.map_err(|e| {
ForesterError::Custom(format!("Failed to send previous epoch: {}", e))
})?;
}
// Process current epoch
debug!("Processing current epoch: {}", current_epoch);
tx.send(current_epoch)
.await
.map_err(|e| ForesterError::Custom(format!("Failed to send current epoch: {}", e)))?;
debug!("Finished processing current and previous epochs");
Ok(())
}
#[instrument(level = "debug", skip(self), fields(forester = %self.config.payer_keypair.pubkey(), epoch = epoch
))]
async fn process_epoch(&self, epoch: u64) -> Result<()> {
info!("Entering process_epoch");
let processing_flag = self
.processing_epochs
.entry(epoch)
.or_insert_with(|| Arc::new(AtomicBool::new(false)));
if processing_flag
.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
.is_err()
{
// Another task is already processing this epoch
debug!("Epoch {} is already being processed, skipping", epoch);
return Ok(());
}
let phases = get_epoch_phases(&self.protocol_config, epoch);
// Attempt to recover registration info
debug!("Recovering registration info for epoch {}", epoch);
let mut registration_info = match self.recover_registration_info(epoch).await {
Ok(info) => info,
Err(e) => {
warn!("Failed to recover registration info: {:?}", e);
// If recovery fails, attempt to register
self.register_for_epoch_with_retry(epoch, 20, Duration::from_millis(1000))
.await?
}
};
debug!("Recovered registration info for epoch {}", epoch);
// Wait for active phase
registration_info = self.wait_for_active_phase(&registration_info).await?;
// Perform work
if self.sync_slot().await? < phases.active.end {
self.perform_active_work(&registration_info).await?;
}
// Wait for report work phase
if self.sync_slot().await? < phases.report_work.start {
self.wait_for_report_work_phase(&registration_info).await?;
}
// Report work
if self.sync_slot().await? < phases.report_work.end {
self.report_work(&registration_info).await?;
}
// TODO: implement
// self.claim(&registration_info).await?;
// Ensure we reset the processing flag when we're done
let _reset_guard = scopeguard::guard((), |_| {
processing_flag.store(false, Ordering::SeqCst);
});
info!("Exiting process_epoch");
Ok(())
}
async fn get_current_slot_and_epoch(&self) -> Result<(u64, u64)> {
let slot = self.slot_tracker.estimated_current_slot();
Ok((slot, self.protocol_config.get_current_epoch(slot)))
}
#[instrument(level = "debug", skip(self), fields(forester = %self.config.payer_keypair.pubkey(), epoch = epoch
))]
async fn register_for_epoch_with_retry(
&self,
epoch: u64,
max_retries: u32,
retry_delay: Duration,
) -> Result<ForesterEpochInfo> {
let mut rpc = self.rpc_pool.get_connection().await?;
let slot = rpc.get_slot().await?;
let phases = get_epoch_phases(&self.protocol_config, epoch);
// Check if it's already too late to register
if slot >= phases.registration.end {
return Err(ForesterError::Custom(format!(
"Too late to register for epoch {}. Current slot: {}, Registration end: {}",
epoch, slot, phases.registration.end
)));
}
for attempt in 0..max_retries {
match self.register_for_epoch(epoch).await {
Ok(registration_info) => return Ok(registration_info),
Err(e) => {
warn!(
"Failed to register for epoch {} (attempt {}): {:?}",
epoch,
attempt + 1,
e
);
if attempt < max_retries - 1 {
sleep(retry_delay).await;
} else {
if let Err(alert_err) = send_pagerduty_alert(
&self
.config
.external_services
.pagerduty_routing_key
.clone()
.unwrap(),
&format!(
"Forester failed to register for epoch {} after {} attempts",
epoch, max_retries
),
"critical",
&format!("Forester {}", self.config.payer_keypair.pubkey()),
)
.await
{
error!("Failed to send PagerDuty alert: {:?}", alert_err);
}
return Err(e);
}
}
}
}
Err(ForesterError::Custom(format!(
"Failed to register for epoch {} after {} attempts",
epoch, max_retries
)))
}
#[instrument(level = "debug", skip(self), fields(forester = %self.config.payer_keypair.pubkey(), epoch = epoch
))]
async fn register_for_epoch(&self, epoch: u64) -> Result<ForesterEpochInfo> {
info!("Registering for epoch: {}", epoch);
let mut rpc =
SolanaRpcConnection::new(self.config.external_services.rpc_url.as_str(), None);
let slot = rpc.get_slot().await?;
let phases = get_epoch_phases(&self.protocol_config, epoch);
if slot < phases.registration.end {
let forester_epoch_pda_pubkey =
get_forester_epoch_pda_from_authority(&self.config.derivation_pubkey, epoch).0;
let existing_registration = rpc
.get_anchor_account::<ForesterEpochPda>(&forester_epoch_pda_pubkey)
.await?;
if let Some(existing_pda) = existing_registration {
info!(
"Already registered for epoch {}. Recovering registration info.",
epoch
);
let registration_info = self
.recover_registration_info_internal(
epoch,
forester_epoch_pda_pubkey,
existing_pda,
)
.await?;
return Ok(registration_info);
}
let registration_info = {
debug!("Registering epoch {}", epoch);
let registered_epoch = match Epoch::register(
&mut rpc,
&self.protocol_config,
&self.config.payer_keypair,
&self.config.derivation_pubkey,
)
.await
{
Ok(Some(epoch)) => {
debug!("Registered epoch: {:?}", epoch);
epoch
}
Ok(None) => {
return Err(ForesterError::Custom(
"Epoch::register returned None".into(),
))
}
Err(e) => {
return Err(ForesterError::Custom(format!(
"Epoch::register failed: {:?}",
e
)))
}
};
let forester_epoch_pda = match rpc
.get_anchor_account::<ForesterEpochPda>(&registered_epoch.forester_epoch_pda)
.await
{
Ok(Some(pda)) => {
debug!("ForesterEpochPda: {:?}", pda);
pda
}
Ok(None) => {
return Err(ForesterError::Custom(
"Failed to get ForesterEpochPda: returned None".into(),
))
}
Err(e) => {
return Err(ForesterError::Custom(format!(
"Failed to get ForesterEpochPda: {:?}",
e
)))
}
};
let epoch_pda_address = get_epoch_pda_address(epoch);
let epoch_pda = match rpc
.get_anchor_account::<EpochPda>(&epoch_pda_address)
.await?
{
Some(pda) => pda,
None => {
return Err(ForesterError::Custom(
"Failed to get EpochPda: returned None".into(),
))
}
};
ForesterEpochInfo {
epoch: registered_epoch,
epoch_pda,
forester_epoch_pda,
trees: Vec::new(),
}
};
debug!("Registration for epoch completed");
debug!("Registration Info: {:?}", registration_info);
Ok(registration_info)
} else {
warn!(
"Too late to register for epoch {}. Current slot: {}, Registration end: {}",
epoch, slot, phases.registration.end
);
Err(ForesterError::Custom(
"Too late to register for epoch".into(),
))
}
}
async fn recover_registration_info_internal(
&self,
epoch: u64,
forester_epoch_pda_address: Pubkey,
forester_epoch_pda: ForesterEpochPda,
) -> Result<ForesterEpochInfo> {
let mut rpc = self.rpc_pool.get_connection().await?;
let phases = get_epoch_phases(&self.protocol_config, epoch);
let slot = rpc.get_slot().await?;
let state = phases.get_current_epoch_state(slot);
let epoch_pda_address = get_epoch_pda_address(epoch);
let epoch_pda = match rpc
.get_anchor_account::<EpochPda>(&epoch_pda_address)
.await?
{
Some(pda) => pda,
None => {
return Err(ForesterError::Custom(
"Failed to get EpochPda: returned None".into(),
))
}
};
let epoch_info = Epoch {
epoch,
epoch_pda: epoch_pda_address,
forester_epoch_pda: forester_epoch_pda_address,
phases,
state,
merkle_trees: Vec::new(),
};
let forester_epoch_info = ForesterEpochInfo {
epoch: epoch_info,
epoch_pda,
forester_epoch_pda,
trees: Vec::new(),
};
Ok(forester_epoch_info)
}
#[instrument(level = "debug", skip(self, epoch_info), fields(forester = %self.config.payer_keypair.pubkey(), epoch = epoch_info.epoch.epoch
))]
async fn wait_for_active_phase(
&self,
epoch_info: &ForesterEpochInfo,
) -> Result<ForesterEpochInfo> {
info!("Waiting for active phase");
let mut rpc = self.rpc_pool.get_connection().await?;
let active_phase_start_slot = epoch_info.epoch.phases.active.start;
wait_until_slot_reached(&mut *rpc, &self.slot_tracker, active_phase_start_slot).await?;
let forester_epoch_pda_pubkey = get_forester_epoch_pda_from_authority(
&self.config.derivation_pubkey,
epoch_info.epoch.epoch,
)
.0;
let existing_registration = rpc
.get_anchor_account::<ForesterEpochPda>(&forester_epoch_pda_pubkey)
.await?;
if let Some(registration) = existing_registration {
if registration.total_epoch_weight.is_none() {
// TODO: we can put this ix into every tx of the first batch of the current active phase
let ix = create_finalize_registration_instruction(
&self.config.payer_keypair.pubkey(),
&self.config.derivation_pubkey,
epoch_info.epoch.epoch,
);
rpc.create_and_send_transaction(
&[ix],
&self.config.payer_keypair.pubkey(),
&[&self.config.payer_keypair],
)
.await?;
}
}
let mut epoch_info = (*epoch_info).clone();
epoch_info.forester_epoch_pda = rpc
.get_anchor_account::<ForesterEpochPda>(&epoch_info.epoch.forester_epoch_pda)
.await?
.ok_or_else(|| ForesterError::Custom("Failed to get ForesterEpochPda".to_string()))?;
let slot = rpc.get_slot().await?;
let trees = self.trees.lock().await;
epoch_info.add_trees_with_schedule(&trees, slot);
info!("Finished waiting for active phase");
Ok(epoch_info)
}
// TODO: add a receiver for newly discovered trees -> spawn a new task to process each tree, derive its schedule, etc.
// TODO: optimize active phase startup time
#[instrument(
level = "debug",
skip(self, epoch_info),
fields(forester = %self.config.payer_keypair.pubkey(), epoch = epoch_info.epoch.epoch
))]
async fn perform_active_work(&self, epoch_info: &ForesterEpochInfo) -> Result<()> {
info!("Performing active work");
let current_slot = self.slot_tracker.estimated_current_slot();
let active_phase_end = epoch_info.epoch.phases.active.end;
if !self.is_in_active_phase(current_slot, epoch_info)? {
info!("No longer in active phase. Skipping work.");
return Ok(());
}
self.sync_slot().await?;
let self_arc = Arc::new(self.clone());
let epoch_info_arc = Arc::new(epoch_info.clone());
let mut handles: Vec<JoinHandle<Result<()>>> = Vec::new();
debug!(
"Creating threads for tree processing. Trees: {:?}",
epoch_info.trees
);
for tree in epoch_info.trees.iter() {
info!("Creating thread for queue {}", tree.tree_accounts.queue);
let self_clone = self_arc.clone();
let epoch_info_clone = epoch_info_arc.clone();
let tree = tree.clone();
let handle = tokio::spawn(async move {
self_clone
.process_queue(
&epoch_info_clone.epoch,
&epoch_info_clone.forester_epoch_pda,
tree,
)
.await
});
handles.push(handle);
}
debug!("Threads created. Waiting for active phase to end");
// Wait for all tasks to complete
for result in join_all(handles).await {
match result {
Ok(Ok(())) => {
debug!("Queue processed successfully");
}
Ok(Err(e)) => error!("Error processing queue: {:?}", e),
Err(e) => error!("Task panicked: {:?}", e),
}
}
debug!("Waiting for active phase to end");
let mut rpc = self.rpc_pool.get_connection().await?;
wait_until_slot_reached(&mut *rpc, &self.slot_tracker, active_phase_end).await?;
info!("Completed active work");
Ok(())
}
// Sync estimated slot before creating threads.
// Threads rely on the estimated slot.
async fn sync_slot(&self) -> Result<u64> {
let mut rpc = self.rpc_pool.get_connection().await?;
let current_slot = rpc.get_slot().await?;
self.slot_tracker.update(current_slot);
Ok(current_slot)
}
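/// Processes a single tree's queue during the active phase. For every light
/// slot in which this forester is eligible, waits until the slot's start
/// Solana slot, sends batched transactions for the duration of the light slot,
/// and concurrently checks whether the tree should be rolled over. Returns
/// once no eligible slots remain or the active phase has ended.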
#[instrument(
level = "debug",
skip(self, epoch_info, epoch_pda, tree),
fields(forester = %self.config.payer_keypair.pubkey(), epoch = epoch_info.epoch,
tree = %tree.tree_accounts.merkle_tree)
)]
pub async fn process_queue(
&self,
epoch_info: &Epoch,
epoch_pda: &ForesterEpochPda,
mut tree: TreeForesterSchedule,
) -> Result<()> {
debug!("enter process_queue");
debug!("Tree schedule slots: {:?}", tree.slots);
// TODO: sync at some point
let mut estimated_slot = self.slot_tracker.estimated_current_slot();
debug!(
"Estimated slot: {}, epoch end: {}",
estimated_slot, epoch_info.phases.active.end
);
while estimated_slot < epoch_info.phases.active.end {
debug!("Searching for next eligible slot");
// search for next eligible slot
let index_and_forester_slot = tree
.slots
.iter()
.enumerate()
.find(|(_, slot)| slot.is_some());
if let Some((index, forester_slot)) = index_and_forester_slot {
debug!("Found eligible slot");
let forester_slot = forester_slot.as_ref().unwrap().clone();
tree.slots.remove(index);
let mut rpc = self.rpc_pool.get_connection().await?;
// Wait until the next eligible light slot is reached (i.e., until its start Solana slot is reached)
wait_until_slot_reached(
&mut *rpc,
&self.slot_tracker,
forester_slot.start_solana_slot,
)
.await?;
let light_slot_timeout = {
let slot_length_u32 = u32::try_from(epoch_pda.protocol_config.slot_length)
.map_err(|_| ForesterError::Custom("Slot length overflow".into()))?;
slot_duration()
.checked_mul(slot_length_u32)
.ok_or_else(|| {
ForesterError::Custom("Timeout calculation overflow".into())
})?
};
// TODO: measure accuracy
// Optionally replace with a shutdown signal for all child processes
let batched_tx_config = SendBatchedTransactionsConfig {
num_batches: 10,
build_transaction_batch_config: BuildTransactionBatchConfig {
batch_size: 50, // TODO: make batch size configurable and/or dynamic based on queue usage
compute_unit_price: None, // Make dynamic based on queue usage
compute_unit_limit: Some(1_000_000),
},
queue_config: self.config.queue_config,
retry_config: RetryConfig {
timeout: light_slot_timeout,
..self.config.retry_config
},
light_slot_length: epoch_pda.protocol_config.slot_length,
};
let transaction_builder = EpochManagerTransactions {
indexer: self.indexer.clone(), // TODO: remove clone
epoch: epoch_info.epoch,
phantom: std::marker::PhantomData::<R>,
};
debug!("Sending transactions...");
let start_time = Instant::now();
let batch_tx_future = send_batched_transactions(
&self.config.payer_keypair,
&self.config.derivation_pubkey,
self.rpc_pool.clone(),
&batched_tx_config, // TODO: define config in epoch manager
tree.tree_accounts,
&transaction_builder,
);
// Check whether the tree is ready for rollover once per light slot.
let future = self.rollover_if_needed(&tree.tree_accounts);
// Wait for both operations to complete
let (num_tx_sent, rollover_result) = tokio::join!(batch_tx_future, future);
rollover_result?;
match num_tx_sent {
Ok(num_tx_sent) => {
debug!("Transactions sent successfully");
let chunk_duration = start_time.elapsed();
queue_metric_update(epoch_info.epoch, num_tx_sent, chunk_duration).await;
self.increment_processed_items_count(epoch_info.epoch, num_tx_sent)
.await;
}
Err(e) => {
error!("Failed to send transactions: {:?}", e);
}
}
} else {
debug!("No eligible slot found");
// The forester is not eligible for any more slots in the current epoch
break;
}
push_metrics(&self.config.external_services.pushgateway_url).await?;
// Yield to allow other tasks to run
tokio::task::yield_now().await;
estimated_slot = self.slot_tracker.estimated_current_slot();
debug!(
"Estimated slot: {}, epoch end: {}",
estimated_slot, epoch_info.phases.active.end
);
}
Ok(())
}
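/// Checks on-chain whether the tree has crossed its rollover threshold and,
/// if it has, performs the rollover.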
async fn rollover_if_needed(&self, tree_account: &TreeAccounts) -> Result<()> {
let mut rpc = self.rpc_pool.get_connection().await?;
if is_tree_ready_for_rollover(&mut *rpc, tree_account.merkle_tree, tree_account.tree_type)
.await?
{
info!("Starting {} rollover.", tree_account.merkle_tree);
self.perform_rollover(tree_account).await?;
}
Ok(())
}
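/// Returns true only if `slot` lies within the active phase of the epoch
/// tracked by `epoch_info`.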
fn is_in_active_phase(&self, slot: u64, epoch_info: &ForesterEpochInfo) -> Result<bool> {
let current_epoch = self.protocol_config.get_current_active_epoch(slot)?;
if current_epoch != epoch_info.epoch.epoch {
return Ok(false);
}
Ok(self
.protocol_config
.is_active_phase(slot, epoch_info.epoch.epoch)
.is_ok())
}
#[instrument(
level = "debug",
skip(self, epoch_info),
fields(forester = %self.config.payer_keypair.pubkey(), epoch = epoch_info.epoch.epoch
))]
async fn wait_for_report_work_phase(&self, epoch_info: &ForesterEpochInfo) -> Result<()> {
info!("Waiting for report work phase");
let mut rpc = self.rpc_pool.get_connection().await?;
let report_work_start_slot = epoch_info.epoch.phases.report_work.start;
wait_until_slot_reached(&mut *rpc, &self.slot_tracker, report_work_start_slot).await?;
info!("Finished waiting for report work phase");
Ok(())
}
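/// Reports the forester's work for the epoch. Skips reporting if the on-chain
/// `ForesterEpochPda` (or the cached copy) already has work reported, treats
/// "already reported" and "not in report work phase" registry errors as
/// non-fatal, and finally forwards a `WorkReport` with the processed item
/// count over the work report channel.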
#[instrument(
level = "debug",
skip(self, epoch_info),
fields(forester = %self.config.payer_keypair.pubkey(), epoch = epoch_info.epoch.epoch
))]
async fn report_work(&self, epoch_info: &ForesterEpochInfo) -> Result<()> {
info!("Reporting work");
let mut rpc =
SolanaRpcConnection::new(self.config.external_services.rpc_url.as_str(), None);
let forester_epoch_pda_pubkey = get_forester_epoch_pda_from_authority(
&self.config.derivation_pubkey,
epoch_info.epoch.epoch,
)
.0;
if let Some(forester_epoch_pda) = rpc
.get_anchor_account::<ForesterEpochPda>(&forester_epoch_pda_pubkey)
.await?
{
if forester_epoch_pda.has_reported_work {
return Ok(());
}
}
let forester_epoch_pda = &epoch_info.forester_epoch_pda;
if forester_epoch_pda.has_reported_work {
return Ok(());
}
let ix = create_report_work_instruction(
&self.config.payer_keypair.pubkey(),
&self.config.derivation_pubkey,
epoch_info.epoch.epoch,
);
match rpc
.create_and_send_transaction(
&[ix],
&self.config.payer_keypair.pubkey(),
&[&self.config.payer_keypair],
)
.await
{
Ok(_) => {
info!("Work reported successfully");
}
Err(e) => {
if let RpcError::ClientError(client_error) = &e {
if let Some(TransactionError::InstructionError(
_,
InstructionError::Custom(error_code),
)) = client_error.get_transaction_error()
{
let reported_work_code = RegistryError::ForesterAlreadyReportedWork as u32;
let not_in_report_work_phase_code =
RegistryError::NotInReportWorkPhase as u32;
if error_code == reported_work_code {
info!("Work already reported for this epoch. Skipping.");
return Ok(());
} else if error_code == not_in_report_work_phase_code {
warn!("Not in report work phase. Skipping report.");
return Ok(());
} else {
// Log other registry errors but still return an Err
warn!("Registry error encountered: {:?}", client_error);
}
}
}
return Err(ForesterError::from(e));
}
}
let report = WorkReport {
epoch: epoch_info.epoch.epoch,
processed_items: self.get_processed_items_count(epoch_info.epoch.epoch).await,
};
self.work_report_sender
.send(report)
.await
.map_err(|e| ForesterError::Custom(format!("Failed to send work report: {}", e)))?;
info!("Work reported");
Ok(())
}
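/// Dispatches the rollover to the address- or state-tree implementation based
/// on the tree type. Failures are logged as warnings and not propagated.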
async fn perform_rollover(&self, tree_account: &TreeAccounts) -> Result<()> {
let mut rpc = self.rpc_pool.get_connection().await?;
let result = match tree_account.tree_type {
TreeType::Address => {
rollover_address_merkle_tree(
self.config.clone(),
&mut *rpc,
self.indexer.clone(),
tree_account,
)
.await
}
TreeType::State => {
rollover_state_merkle_tree(
self.config.clone(),
&mut *rpc,
self.indexer.clone(),
tree_account,
)
.await
}
};
match result {
Ok(_) => debug!(
"{:?} tree rollover completed successfully",
tree_account.tree_type
),
Err(e) => warn!("{:?} tree rollover failed: {:?}", tree_account.tree_type, e),
}
Ok(())
}
#[allow(dead_code)]
async fn claim(&self, _forester_epoch_info: ForesterEpochInfo) {
todo!()
}
}
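/// Entry point for the forester service: fetches the initial set of trees,
/// starts background tree discovery, then creates an `EpochManager` (retrying
/// with exponential backoff up to `max_retries`) and runs it until a shutdown
/// signal is received.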
#[instrument(
level = "info",
skip(config, protocol_config, rpc_pool, indexer, shutdown, work_report_sender, slot_tracker),
fields(forester = %config.payer_keypair.pubkey())
)]
pub async fn run_service<R: RpcConnection, I: Indexer<R>>(
config: Arc<ForesterConfig>,
protocol_config: Arc<ProtocolConfig>,
rpc_pool: Arc<SolanaRpcPool<R>>,
indexer: Arc<Mutex<I>>,
shutdown: oneshot::Receiver<()>,
work_report_sender: mpsc::Sender<WorkReport>,
slot_tracker: Arc<SlotTracker>,
) -> Result<()> {
info_span!("run_service", forester = %config.payer_keypair.pubkey())
.in_scope(|| async {
const INITIAL_RETRY_DELAY: Duration = Duration::from_secs(1);
const MAX_RETRY_DELAY: Duration = Duration::from_secs(30);
let mut retry_count = 0;
let mut retry_delay = INITIAL_RETRY_DELAY;
let start_time = Instant::now();
let trees = {
let rpc = rpc_pool.get_connection().await?;
fetch_trees(&*rpc).await?
};
info!("Fetched initial trees: {:?}", trees);
let (new_tree_sender, _) = broadcast::channel(100);
let mut tree_finder = TreeFinder::new(
rpc_pool.clone(),
trees.clone(),
new_tree_sender.clone(),
Duration::from_secs(config.general_config.tree_discovery_interval_seconds),
);
let _tree_finder_handle = tokio::spawn(async move {
if let Err(e) = tree_finder.run().await {
error!("Tree finder error: {:?}", e);
}
});
while retry_count < config.retry_config.max_retries {
debug!("Creating EpochManager (attempt {})", retry_count + 1);
match EpochManager::new(
config.clone(),
protocol_config.clone(),
rpc_pool.clone(),
indexer.clone(),
work_report_sender.clone(),
trees.clone(),
slot_tracker.clone(),
new_tree_sender.clone(),
)
.await
{
Ok(epoch_manager) => {
let epoch_manager: Arc<EpochManager<R, I>> = Arc::new(epoch_manager);
debug!(
"Successfully created EpochManager after {} attempts",
retry_count + 1
);
return tokio::select! {
result = epoch_manager.run() => result,
_ = shutdown => {
info!("Received shutdown signal. Stopping the service.");
Ok(())
}
};
}
Err(e) => {
warn!(
"Failed to create EpochManager (attempt {}): {:?}",
retry_count + 1,
e
);
retry_count += 1;
if retry_count < config.retry_config.max_retries {
debug!("Retrying in {:?}", retry_delay);
sleep(retry_delay).await;
retry_delay = std::cmp::min(retry_delay * 2, MAX_RETRY_DELAY);
} else {
error!(
"Failed to start forester after {} attempts over {:?}",
config.retry_config.max_retries,
start_time.elapsed()
);
return Err(ForesterError::Custom(format!(
"Failed to start forester after {} attempts: {:?}",
config.retry_config.max_retries, e
)));
}
}
}
}
Err(ForesterError::Custom(
"Unexpected error: Retry loop exited without returning".to_string(),
))
})
.await
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester/src
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/rollover/mod.rs
|
mod operations;
mod state;
pub use operations::{
get_tree_fullness, is_tree_ready_for_rollover, rollover_address_merkle_tree,
rollover_state_merkle_tree,
};
pub use state::RolloverState;
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester/src
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/rollover/state.rs
|
use std::sync::atomic::{AtomicBool, Ordering};
#[derive(Debug)]
pub struct RolloverState {
is_rollover_in_progress: AtomicBool,
}
impl RolloverState {
pub fn new() -> Self {
Self {
is_rollover_in_progress: AtomicBool::new(false),
}
}
pub fn try_start_rollover(&self) -> bool {
self.is_rollover_in_progress
.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
.is_ok()
}
pub fn end_rollover(&self) {
self.is_rollover_in_progress.store(false, Ordering::SeqCst);
}
}
impl Default for RolloverState {
fn default() -> Self {
Self::new()
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/forester/src
|
solana_public_repos/Lightprotocol/light-protocol/forester/src/rollover/operations.rs
|
use std::sync::Arc;
use light_registry::account_compression_cpi::sdk::{
create_rollover_address_merkle_tree_instruction, create_rollover_state_merkle_tree_instruction,
CreateRolloverMerkleTreeInstructionInputs,
};
use light_registry::protocol_config::state::ProtocolConfig;
use solana_sdk::instruction::Instruction;
use solana_sdk::pubkey::Pubkey;
use solana_sdk::signature::Keypair;
use solana_sdk::signer::Signer;
use solana_sdk::transaction::Transaction;
use tokio::sync::Mutex;
use tracing::{debug, info};
use crate::errors::ForesterError;
use crate::ForesterConfig;
use account_compression::utils::constants::{
STATE_MERKLE_TREE_CANOPY_DEPTH, STATE_MERKLE_TREE_HEIGHT,
};
use account_compression::{
AddressMerkleTreeAccount, AddressMerkleTreeConfig, AddressQueueConfig, NullifierQueueConfig,
QueueAccount, StateMerkleTreeAccount, StateMerkleTreeConfig,
};
use forester_utils::address_merkle_tree_config::{
get_address_bundle_config, get_state_bundle_config,
};
use forester_utils::forester_epoch::{TreeAccounts, TreeType};
use forester_utils::indexer::{
AddressMerkleTreeAccounts, Indexer, StateMerkleTreeAccounts, StateMerkleTreeBundle,
};
use forester_utils::registry::RentExemption;
use forester_utils::{
create_account_instruction, get_concurrent_merkle_tree, get_indexed_merkle_tree,
};
use light_client::rpc::{RpcConnection, RpcError};
use light_hasher::Poseidon;
use light_merkle_tree_reference::MerkleTree;
enum TreeAccount {
State(StateMerkleTreeAccount),
Address(AddressMerkleTreeAccount),
}
#[derive(Debug, Clone)]
pub struct TreeInfo {
pub fullness: f64,
pub next_index: usize,
pub threshold: usize,
}
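/// Reads the on-chain accounts for the given state or address tree and returns
/// its next index, the rollover threshold expressed in leaves, and the
/// fullness ratio (next index / capacity).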
pub async fn get_tree_fullness<R: RpcConnection>(
rpc: &mut R,
tree_pubkey: Pubkey,
tree_type: TreeType,
) -> Result<TreeInfo, ForesterError> {
match tree_type {
TreeType::State => {
let account = rpc
.get_anchor_account::<StateMerkleTreeAccount>(&tree_pubkey)
.await?
.unwrap();
let merkle_tree =
get_concurrent_merkle_tree::<StateMerkleTreeAccount, R, Poseidon, 26>(
rpc,
tree_pubkey,
)
.await;
let height = 26;
let capacity = 1 << height;
let threshold = ((1 << height) * account.metadata.rollover_metadata.rollover_threshold
/ 100) as usize;
let next_index = merkle_tree.next_index();
let fullness = next_index as f64 / capacity as f64;
Ok(TreeInfo {
fullness,
next_index,
threshold,
})
}
TreeType::Address => {
let account = rpc
.get_anchor_account::<AddressMerkleTreeAccount>(&tree_pubkey)
.await?
.unwrap();
let queue_account = rpc
.get_anchor_account::<QueueAccount>(&account.metadata.associated_queue)
.await?
.unwrap();
let merkle_tree =
get_indexed_merkle_tree::<AddressMerkleTreeAccount, R, Poseidon, usize, 26, 16>(
rpc,
tree_pubkey,
)
.await;
let height = 26;
let capacity = 1 << height;
let threshold = ((1 << height)
* queue_account.metadata.rollover_metadata.rollover_threshold
/ 100) as usize;
let next_index = merkle_tree.next_index() - 3;
let fullness = next_index as f64 / capacity as f64;
Ok(TreeInfo {
fullness,
next_index,
threshold,
})
}
}
}
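/// Returns false if the tree has already been rolled over
/// (`rolledover_slot != u64::MAX`); otherwise returns whether the tree's next
/// index has reached the rollover threshold (and is past the initial leaves).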
pub async fn is_tree_ready_for_rollover<R: RpcConnection>(
rpc: &mut R,
tree_pubkey: Pubkey,
tree_type: TreeType,
) -> Result<bool, ForesterError> {
debug!(
"Checking if tree is ready for rollover: {:?}",
tree_pubkey.to_string()
);
let account = match tree_type {
TreeType::State => TreeAccount::State(
rpc.get_anchor_account::<StateMerkleTreeAccount>(&tree_pubkey)
.await?
.unwrap(),
),
TreeType::Address => TreeAccount::Address(
rpc.get_anchor_account::<AddressMerkleTreeAccount>(&tree_pubkey)
.await?
.unwrap(),
),
};
let is_already_rolled_over = match &account {
TreeAccount::State(acc) => acc.metadata.rollover_metadata.rolledover_slot != u64::MAX,
TreeAccount::Address(acc) => acc.metadata.rollover_metadata.rolledover_slot != u64::MAX,
};
if is_already_rolled_over {
return Ok(false);
}
let tree_info = get_tree_fullness(rpc, tree_pubkey, tree_type).await?;
match tree_type {
TreeType::State => {
Ok(tree_info.next_index >= tree_info.threshold && tree_info.next_index > 1)
}
TreeType::Address => {
Ok(tree_info.next_index >= tree_info.threshold && tree_info.next_index > 3)
}
}
}
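/// Generates keypairs for the new state tree, nullifier queue and CPI context
/// account, performs the on-chain rollover, and registers the new accounts
/// with the indexer.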
pub async fn rollover_state_merkle_tree<R: RpcConnection, I: Indexer<R>>(
config: Arc<ForesterConfig>,
rpc: &mut R,
indexer: Arc<Mutex<I>>,
tree_accounts: &TreeAccounts,
) -> Result<(), ForesterError> {
let new_nullifier_queue_keypair = Keypair::new();
let new_merkle_tree_keypair = Keypair::new();
let new_cpi_signature_keypair = Keypair::new();
let rollover_signature = perform_state_merkle_tree_rollover_forester(
&config.payer_keypair,
&config.derivation_pubkey,
rpc,
&new_nullifier_queue_keypair,
&new_merkle_tree_keypair,
&new_cpi_signature_keypair,
&tree_accounts.merkle_tree,
&tree_accounts.queue,
&Pubkey::default(),
)
.await?;
info!("State rollover signature: {:?}", rollover_signature);
let state_bundle = StateMerkleTreeBundle {
// TODO: fetch correct fee when this property is used
rollover_fee: 0,
accounts: StateMerkleTreeAccounts {
merkle_tree: new_merkle_tree_keypair.pubkey(),
nullifier_queue: new_nullifier_queue_keypair.pubkey(),
cpi_context: new_cpi_signature_keypair.pubkey(),
},
merkle_tree: Box::new(MerkleTree::<Poseidon>::new(
STATE_MERKLE_TREE_HEIGHT as usize,
STATE_MERKLE_TREE_CANOPY_DEPTH as usize,
)),
};
indexer.lock().await.add_state_bundle(state_bundle);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub async fn perform_state_merkle_tree_rollover_forester<R: RpcConnection>(
payer: &Keypair,
derivation: &Pubkey,
context: &mut R,
new_queue_keypair: &Keypair,
new_address_merkle_tree_keypair: &Keypair,
new_cpi_context_keypair: &Keypair,
old_merkle_tree_pubkey: &Pubkey,
old_queue_pubkey: &Pubkey,
old_cpi_context_pubkey: &Pubkey,
) -> Result<solana_sdk::signature::Signature, RpcError> {
let instructions = create_rollover_state_merkle_tree_instructions(
context,
&payer.pubkey(),
derivation,
new_queue_keypair,
new_address_merkle_tree_keypair,
new_cpi_context_keypair,
old_merkle_tree_pubkey,
old_queue_pubkey,
old_cpi_context_pubkey,
)
.await;
let blockhash = context.get_latest_blockhash().await.unwrap();
let transaction = Transaction::new_signed_with_payer(
&instructions,
Some(&payer.pubkey()),
&vec![
&payer,
&new_queue_keypair,
&new_address_merkle_tree_keypair,
&new_cpi_context_keypair,
],
blockhash,
);
context.process_transaction(transaction).await
}
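/// Generates keypairs for the new address tree and queue, performs the
/// on-chain rollover, and registers the new accounts with the indexer.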
pub async fn rollover_address_merkle_tree<R: RpcConnection, I: Indexer<R>>(
config: Arc<ForesterConfig>,
rpc: &mut R,
indexer: Arc<Mutex<I>>,
tree_data: &TreeAccounts,
) -> Result<(), ForesterError> {
let new_nullifier_queue_keypair = Keypair::new();
let new_merkle_tree_keypair = Keypair::new();
let rollover_signature = perform_address_merkle_tree_rollover(
&config.payer_keypair,
&config.derivation_pubkey,
rpc,
&new_nullifier_queue_keypair,
&new_merkle_tree_keypair,
&tree_data.merkle_tree,
&tree_data.queue,
)
.await?;
info!("Address rollover signature: {:?}", rollover_signature);
indexer.lock().await.add_address_merkle_tree_accounts(
&new_merkle_tree_keypair,
&new_nullifier_queue_keypair,
None,
);
Ok(())
}
pub async fn perform_address_merkle_tree_rollover<R: RpcConnection>(
payer: &Keypair,
derivation: &Pubkey,
context: &mut R,
new_queue_keypair: &Keypair,
new_address_merkle_tree_keypair: &Keypair,
old_merkle_tree_pubkey: &Pubkey,
old_queue_pubkey: &Pubkey,
) -> Result<solana_sdk::signature::Signature, RpcError> {
let instructions = create_rollover_address_merkle_tree_instructions(
context,
&payer.pubkey(),
derivation,
new_queue_keypair,
new_address_merkle_tree_keypair,
old_merkle_tree_pubkey,
old_queue_pubkey,
)
.await;
let blockhash = context.get_latest_blockhash().await.unwrap();
let transaction = Transaction::new_signed_with_payer(
&instructions,
Some(&payer.pubkey()),
&vec![&payer, &new_queue_keypair, &new_address_merkle_tree_keypair],
blockhash,
);
context.process_transaction(transaction).await
}
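/// Builds the three instructions needed to roll over an address tree: create
/// the new queue account, create the new Merkle tree account, and invoke the
/// registry rollover instruction.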
pub async fn create_rollover_address_merkle_tree_instructions<R: RpcConnection>(
rpc: &mut R,
authority: &Pubkey,
derivation: &Pubkey,
new_nullifier_queue_keypair: &Keypair,
new_address_merkle_tree_keypair: &Keypair,
merkle_tree_pubkey: &Pubkey,
nullifier_queue_pubkey: &Pubkey,
) -> Vec<Instruction> {
let (merkle_tree_config, queue_config) = get_address_bundle_config(
rpc,
AddressMerkleTreeAccounts {
merkle_tree: *merkle_tree_pubkey,
queue: *nullifier_queue_pubkey,
},
)
.await;
let (merkle_tree_rent_exemption, queue_rent_exemption) =
get_rent_exemption_for_address_merkle_tree_and_queue(
rpc,
&merkle_tree_config,
&queue_config,
)
.await;
let create_nullifier_queue_instruction = create_account_instruction(
authority,
queue_rent_exemption.size,
queue_rent_exemption.lamports,
&account_compression::ID,
Some(new_nullifier_queue_keypair),
);
let create_address_merkle_tree_instruction = create_account_instruction(
authority,
merkle_tree_rent_exemption.size,
merkle_tree_rent_exemption.lamports,
&account_compression::ID,
Some(new_address_merkle_tree_keypair),
);
let instruction = create_rollover_address_merkle_tree_instruction(
CreateRolloverMerkleTreeInstructionInputs {
authority: *authority,
derivation: *derivation,
new_queue: new_nullifier_queue_keypair.pubkey(),
new_merkle_tree: new_address_merkle_tree_keypair.pubkey(),
old_queue: *nullifier_queue_pubkey,
old_merkle_tree: *merkle_tree_pubkey,
cpi_context_account: None,
is_metadata_forester: false,
},
0, // TODO: make epoch dynamic
);
vec![
create_nullifier_queue_instruction,
create_address_merkle_tree_instruction,
instruction,
]
}
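/// Builds the instructions needed to roll over a state tree: create the new
/// CPI context, queue and Merkle tree accounts, then invoke the registry
/// rollover instruction.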
#[allow(clippy::too_many_arguments)]
pub async fn create_rollover_state_merkle_tree_instructions<R: RpcConnection>(
rpc: &mut R,
authority: &Pubkey,
derivation: &Pubkey,
new_nullifier_queue_keypair: &Keypair,
new_state_merkle_tree_keypair: &Keypair,
new_cpi_context_keypair: &Keypair,
merkle_tree_pubkey: &Pubkey,
nullifier_queue_pubkey: &Pubkey,
old_cpi_context_pubkey: &Pubkey,
) -> Vec<Instruction> {
let (merkle_tree_config, queue_config) = get_state_bundle_config(
rpc,
StateMerkleTreeAccounts {
merkle_tree: *merkle_tree_pubkey,
nullifier_queue: *nullifier_queue_pubkey,
cpi_context: *old_cpi_context_pubkey, // TODO: check if this is correct
},
)
.await;
let (state_merkle_tree_rent_exemption, queue_rent_exemption) =
get_rent_exemption_for_state_merkle_tree_and_queue(rpc, &merkle_tree_config, &queue_config)
.await;
let create_nullifier_queue_instruction = create_account_instruction(
authority,
queue_rent_exemption.size,
queue_rent_exemption.lamports,
&account_compression::ID,
Some(new_nullifier_queue_keypair),
);
let create_state_merkle_tree_instruction = create_account_instruction(
authority,
state_merkle_tree_rent_exemption.size,
state_merkle_tree_rent_exemption.lamports,
&account_compression::ID,
Some(new_state_merkle_tree_keypair),
);
let rent_cpi_config = rpc
.get_minimum_balance_for_rent_exemption(ProtocolConfig::default().cpi_context_size as usize)
.await
.unwrap();
let create_cpi_context_instruction = create_account_instruction(
authority,
ProtocolConfig::default().cpi_context_size as usize,
rent_cpi_config,
&light_system_program::ID,
Some(new_cpi_context_keypair),
);
let instruction = create_rollover_state_merkle_tree_instruction(
CreateRolloverMerkleTreeInstructionInputs {
authority: *authority,
derivation: *derivation,
new_queue: new_nullifier_queue_keypair.pubkey(),
new_merkle_tree: new_state_merkle_tree_keypair.pubkey(),
old_queue: *nullifier_queue_pubkey,
old_merkle_tree: *merkle_tree_pubkey,
cpi_context_account: Some(new_cpi_context_keypair.pubkey()),
is_metadata_forester: false,
},
0, // TODO: make epoch dynamic
);
vec![
create_cpi_context_instruction,
create_nullifier_queue_instruction,
create_state_merkle_tree_instruction,
instruction,
]
}
pub async fn get_rent_exemption_for_state_merkle_tree_and_queue<R: RpcConnection>(
rpc: &mut R,
merkle_tree_config: &StateMerkleTreeConfig,
queue_config: &NullifierQueueConfig,
) -> (RentExemption, RentExemption) {
let queue_size = QueueAccount::size(queue_config.capacity as usize).unwrap();
let queue_rent_exempt_lamports = rpc
.get_minimum_balance_for_rent_exemption(queue_size)
.await
.unwrap();
let tree_size = account_compression::state::StateMerkleTreeAccount::size(
merkle_tree_config.height as usize,
merkle_tree_config.changelog_size as usize,
merkle_tree_config.roots_size as usize,
merkle_tree_config.canopy_depth as usize,
);
let merkle_tree_rent_exempt_lamports = rpc
.get_minimum_balance_for_rent_exemption(tree_size)
.await
.unwrap();
(
RentExemption {
lamports: merkle_tree_rent_exempt_lamports,
size: tree_size,
},
RentExemption {
lamports: queue_rent_exempt_lamports,
size: queue_size,
},
)
}
pub async fn get_rent_exemption_for_address_merkle_tree_and_queue<R: RpcConnection>(
rpc: &mut R,
address_merkle_tree_config: &AddressMerkleTreeConfig,
address_queue_config: &AddressQueueConfig,
) -> (RentExemption, RentExemption) {
let queue_size = QueueAccount::size(address_queue_config.capacity as usize).unwrap();
let queue_rent_exempt_lamports = rpc
.get_minimum_balance_for_rent_exemption(queue_size)
.await
.unwrap();
let tree_size = account_compression::state::AddressMerkleTreeAccount::size(
address_merkle_tree_config.height as usize,
address_merkle_tree_config.changelog_size as usize,
address_merkle_tree_config.roots_size as usize,
address_merkle_tree_config.canopy_depth as usize,
address_merkle_tree_config.address_changelog_size as usize,
);
let merkle_tree_rent_exempt_lamports = rpc
.get_minimum_balance_for_rent_exemption(tree_size)
.await
.unwrap();
(
RentExemption {
lamports: merkle_tree_rent_exempt_lamports,
size: tree_size,
},
RentExemption {
lamports: queue_rent_exempt_lamports,
size: queue_size,
},
)
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol
|
solana_public_repos/Lightprotocol/light-protocol/xtask/Cargo.toml
|
[package]
name = "xtask"
version = "1.1.0"
edition = "2021"
[dependencies]
account-compression = { workspace = true }
anyhow = "1.0"
ark-bn254 = "0.4"
ark-ff = "0.4"
clap = { version = "4", features = ["derive"] }
groth16-solana = "0.0.3"
light-concurrent-merkle-tree = { path = "../merkle-tree/concurrent", version = "1.1.0" }
light-hash-set = { workspace = true }
light-hasher = { path = "../merkle-tree/hasher", version = "1.1.0" }
light-indexed-merkle-tree = { path = "../merkle-tree/indexed", version = "1.1.0" }
light-utils = { path = "../utils", version = "1.1.0" }
num-bigint = "0.4"
rand = "0.8"
quote = "1.0"
sha2 = "0.10"
solana-program = { workspace = true }
tabled = "0.15"
| 0
|
solana_public_repos/Lightprotocol/light-protocol/xtask
|
solana_public_repos/Lightprotocol/light-protocol/xtask/src/fee.rs
|
use account_compression::{
state::queue::QueueAccount,
utils::constants::{
ADDRESS_MERKLE_TREE_HEIGHT, ADDRESS_QUEUE_VALUES, STATE_MERKLE_TREE_HEIGHT,
STATE_NULLIFIER_QUEUE_VALUES,
},
AddressMerkleTreeAccount, StateMerkleTreeAccount, StateMerkleTreeConfig,
};
use light_utils::fee::compute_rollover_fee;
use solana_program::rent::Rent;
use std::mem;
use tabled::{Table, Tabled};
#[derive(Tabled)]
struct AccountFee {
account: String,
fee: u64,
}
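/// Computes and prints a table of rollover fees for the default state and
/// address tree configurations.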
pub fn fees() -> anyhow::Result<()> {
let rent = Rent::default();
let state_merkle_tree_config = StateMerkleTreeConfig::default();
let fees = vec![
AccountFee {
account: "State Merkle tree (rollover)".to_owned(),
fee: compute_rollover_fee(
state_merkle_tree_config.rollover_threshold.unwrap(),
STATE_MERKLE_TREE_HEIGHT as u32,
rent.minimum_balance(8 + mem::size_of::<StateMerkleTreeAccount>()),
)? + compute_rollover_fee(
state_merkle_tree_config.rollover_threshold.unwrap(),
STATE_MERKLE_TREE_HEIGHT as u32,
rent.minimum_balance(QueueAccount::size(STATE_NULLIFIER_QUEUE_VALUES as usize)?),
)?,
},
AccountFee {
account: "Address queue (rollover)".to_owned(),
fee: compute_rollover_fee(
state_merkle_tree_config.rollover_threshold.unwrap(),
ADDRESS_MERKLE_TREE_HEIGHT as u32,
rent.minimum_balance(8 + mem::size_of::<AddressMerkleTreeAccount>()),
)? + compute_rollover_fee(
state_merkle_tree_config.rollover_threshold.unwrap(),
ADDRESS_MERKLE_TREE_HEIGHT as u32,
rent.minimum_balance(QueueAccount::size(ADDRESS_QUEUE_VALUES.into())?),
)?,
},
];
let table = Table::new(fees);
println!("{table}");
Ok(())
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/xtask
|
solana_public_repos/Lightprotocol/light-protocol/xtask/src/create_vkeyrs_from_gnark_key.rs
|
use clap::Parser;
use groth16_solana::groth16::Groth16Verifyingkey;
use light_utils::rustfmt;
use quote::quote;
use std::{
fs::{self, File},
io::{self, prelude::*},
path::{Path, PathBuf},
};
#[derive(Debug, Parser)]
pub struct Options {
#[clap(long)]
output_path: PathBuf,
#[clap(long)]
input_path: PathBuf,
}
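/// Reads a gnark verifying key exported as a byte array, converts it into a
/// `Groth16Verifyingkey`, and writes a formatted Rust source file that defines
/// the `VERIFYINGKEY` constant.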
pub fn create_vkeyrs_from_gnark_key(opts: Options) -> anyhow::Result<()> {
let gnark_vk_bytes = read_array_from_file(opts.input_path)?;
let vk = read_gnark_vk_bytes(&gnark_vk_bytes);
let nr_pubinputs = vk.nr_pubinputs;
let vk_alpha_g1 = vk.vk_alpha_g1.iter().map(|b| quote! {#b});
let vk_beta_g2 = vk.vk_beta_g2.iter().map(|b| quote! {#b});
let vk_gamme_g2 = vk.vk_gamme_g2.iter().map(|b| quote! {#b});
let vk_delta_g2 = vk.vk_delta_g2.iter().map(|b| quote! {#b});
let vk_ic_slices = vk.vk_ic.iter().map(|slice| {
let bytes = slice.iter().map(|b| quote! {#b});
quote! {[#(#bytes),*]}
});
// Now use these parts with the quote! macro
let code = quote! {
use groth16_solana::groth16::Groth16Verifyingkey;
pub const VERIFYINGKEY: Groth16Verifyingkey = Groth16Verifyingkey {
nr_pubinputs: #nr_pubinputs,
vk_alpha_g1: [#(#vk_alpha_g1),*],
vk_beta_g2: [#(#vk_beta_g2),*],
vk_gamme_g2: [#(#vk_gamme_g2),*],
vk_delta_g2: [#(#vk_delta_g2),*],
vk_ic: &[#(#vk_ic_slices),*],
};
};
let mut file = File::create(&opts.output_path)?;
file.write_all(b"// This file is generated by xtask. Do not edit it manually.\n\n")?;
file.write_all(&rustfmt(code.to_string())?)?;
Ok(())
}
fn read_array_from_file<P: AsRef<Path>>(file_path: P) -> io::Result<Vec<u8>> {
// Read the entire file into a String
let contents = fs::read_to_string(file_path)?;
// Parse the string as a Vec<u8>
let array = contents
.trim_matches(|p| p == '[' || p == ']')
.split(' ')
.map(str::trim)
.filter_map(|s| s.parse::<u8>().ok())
.collect::<Vec<u8>>();
Ok(array)
}
pub fn read_gnark_vk_bytes<'a>(gnark_vk_bytes: &[u8]) -> Groth16Verifyingkey<'a> {
// layout of vk:
// [α]1,[β]1,[β]2,[γ]2,[δ]1,[δ]2, K/IC[num_pubinputs + 1]
let alpha_g1_offset_start: usize = 0;
let alpha_g1_offset_end: usize = 64;
let beta_g2_offset_start: usize = 128;
let beta_g2_offset_end: usize = 256;
let gamma_g2_offset_start: usize = 256;
let gamma_g2_offset_end: usize = 384;
let delta_g2_offset_start: usize = 384 + 64;
let delta_g2_offset_end: usize = 512 + 64;
// K offsets (each element is 64 bytes)
let nr_k_offset_start: usize = 512 + 64;
let nr_k_offset_end: usize = 512 + 64 + 4;
let k_offset_start: usize = 512 + 64 + 4;
let nr_pubinputs: usize = u32::from_be_bytes(
gnark_vk_bytes[nr_k_offset_start..nr_k_offset_end]
.try_into()
.unwrap(),
)
.try_into()
.unwrap();
let gamma_g2_be = gnark_vk_bytes[gamma_g2_offset_start..gamma_g2_offset_end]
.try_into()
.unwrap();
let delta_g2_be = gnark_vk_bytes[delta_g2_offset_start..delta_g2_offset_end]
.try_into()
.unwrap();
let mut vk_ic = Vec::<[u8; 64]>::new();
for i in 0..nr_pubinputs {
vk_ic.push(
gnark_vk_bytes[k_offset_start + i * 64..k_offset_start + (i + 1) * 64]
.try_into()
.unwrap(),
);
}
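// Intentionally leak the IC points so they can be borrowed for the returned
// key's 'a lifetime; acceptable for a short-lived CLI tool like xtask.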
let vk_ic = Box::new(vk_ic);
let vk_ic_slice: &'a [[u8; 64]] = Box::leak(vk_ic);
Groth16Verifyingkey {
nr_pubinputs: nr_pubinputs - 1,
vk_alpha_g1: gnark_vk_bytes[alpha_g1_offset_start..alpha_g1_offset_end]
.try_into()
.unwrap(),
vk_beta_g2: gnark_vk_bytes[beta_g2_offset_start..beta_g2_offset_end]
.try_into()
.unwrap(),
vk_gamme_g2: gamma_g2_be,
vk_delta_g2: delta_g2_be,
vk_ic: vk_ic_slice,
}
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/xtask
|
solana_public_repos/Lightprotocol/light-protocol/xtask/src/zero_bytes.rs
|
use std::{fs::File, io::prelude::*, path::PathBuf};
use clap::Parser;
use light_hasher::{zero_bytes::MAX_HEIGHT, Hasher, Keccak, Poseidon, Sha256};
use light_utils::rustfmt;
use quote::quote;
use crate::Hash;
#[derive(Debug, Parser)]
pub struct Options {
#[clap(value_enum, long, default_value_t = Hash::Sha256)]
hash: Hash,
#[clap(long)]
path: Option<PathBuf>,
}
pub fn generate_zero_bytes(opts: Options) -> anyhow::Result<()> {
match opts.hash {
Hash::Keccak => generate_zero_bytes_for_hasher::<Keccak>(opts),
Hash::Poseidon => generate_zero_bytes_for_hasher::<Poseidon>(opts),
Hash::Sha256 => generate_zero_bytes_for_hasher::<Sha256>(opts),
}
}
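/// Computes the zero-subtree hashes for levels 0..=MAX_HEIGHT with hasher `H`
/// and emits them as a `ZERO_BYTES` constant (optionally written to a file).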
fn generate_zero_bytes_for_hasher<H>(opts: Options) -> anyhow::Result<()>
where
H: Hasher,
{
let mut zero_bytes = [[0u8; 32]; MAX_HEIGHT + 1];
let mut zero_bytes_tokens = vec![];
let mut prev_hash = H::hashv(&[&[0u8; 32], &[0u8; 32]]).unwrap();
zero_bytes[1] = prev_hash;
for element in zero_bytes[2..].iter_mut() {
let cur_hash = H::hashv(&[&prev_hash, &prev_hash]).unwrap();
element.copy_from_slice(&cur_hash);
prev_hash = cur_hash;
}
for element in zero_bytes.iter() {
let element_iter = element.iter();
zero_bytes_tokens.push(quote! {
[ #(#element_iter),* ]
});
}
// NOTE(vadorovsky): I couldn't find any way to do a double repetition
// over a 2D array inside the `quote!` macro, which is why the arrays are
// converted to tokens in the loop above. But I would be grateful if there is any
// way to make it prettier.
//
// Being able to do something like:
//
// ```rust
// let code = quote! {
// const ZERO_BYTES: ZeroBytes = [ #([ #(#zero_bytes),* ]),* ];
// };
// ```
//
// would be great.
let code = quote! {
use super::ZeroBytes;
pub const ZERO_BYTES: ZeroBytes = [ #(#zero_bytes_tokens),* ];
};
println!(
"Zero bytes (generated with {:?} hash): {:?}",
opts.hash, zero_bytes
);
if let Some(path) = opts.path {
let mut file = File::create(&path)?;
file.write_all(b"// This file is generated by xtask. Do not edit it manually.\n\n")?;
file.write_all(&rustfmt(code.to_string())?)?;
println!("Zero bytes written to {:?}", path);
}
Ok(())
}
| 0
|
solana_public_repos/Lightprotocol/light-protocol/xtask
|
solana_public_repos/Lightprotocol/light-protocol/xtask/src/main.rs
|
use clap::{Parser, ValueEnum};
mod bench;
mod create_vkeyrs_from_gnark_key;
mod fee;
mod hash_set;
mod type_sizes;
mod zero_bytes;
mod zero_indexed_leaf;
#[derive(Debug, Clone, ValueEnum)]
enum Hash {
Keccak,
Poseidon,
Sha256,
}
#[derive(Parser)]
pub struct XtaskOptions {
#[clap(subcommand)]
command: Command,
}
#[derive(Parser)]
enum Command {
GenerateZeroBytes(zero_bytes::Options),
/// Generates a leaf of an indexed Merkle tree, for the given hash, which
/// represents a value 0.
GenerateZeroIndexedLeaf(zero_indexed_leaf::Options),
/// Shows the sizes of types used as Light Protocol accounts (or their
/// fields).
TypeSizes,
/// Generates the verification keys for the given gnark key.
GenerateVkeyRs(create_vkeyrs_from_gnark_key::Options),
/// Generates cu and heap memory usage report from a log.txt file
Bench(bench::Options),
/// Prints fees for different accounts.
Fee,
/// Hash set utilities.
HashSet(hash_set::HashSetOptions),
}
fn main() -> Result<(), anyhow::Error> {
let opts = XtaskOptions::parse();
match opts.command {
Command::TypeSizes => type_sizes::type_sizes(),
Command::GenerateZeroBytes(opts) => zero_bytes::generate_zero_bytes(opts),
Command::GenerateZeroIndexedLeaf(opts) => {
zero_indexed_leaf::generate_zero_indexed_leaf(opts)
}
Command::GenerateVkeyRs(opts) => {
create_vkeyrs_from_gnark_key::create_vkeyrs_from_gnark_key(opts)
}
Command::Bench(opts) => bench::bench(opts),
Command::Fee => fee::fees(),
Command::HashSet(opts) => hash_set::hash_set(opts),
}
}
| 0
|