opp_parser: relay script + event parser + 116 parsed Ethereum OPP messages
- relay.ts: automated Ethereum->Wire operator relay pipeline - parse_balances.ts: Ethereum OPP event parser - types.ts: assertion type constants (2001-3006) - opp_events_current.json: 116 parsed messages from Hoodi outpost
This commit is contained in:
6080
opp_events_current.json
Normal file
6080
opp_events_current.json
Normal file
File diff suppressed because it is too large
Load Diff
30
package.json
Normal file
30
package.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"name": "opp-parser",
|
||||
"version": "1.0.0",
|
||||
"description": "OPP Balance Parser for EOSIO Migration",
|
||||
"main": "src/parse_balances.ts",
|
||||
"scripts": {
|
||||
"parse": "ts-node src/parse_balances.ts",
|
||||
"compare": "ts-node src/compare_balances.ts",
|
||||
"relay": "ts-node src/relay.ts",
|
||||
"build": "tsc",
|
||||
"start": "node dist/parse_balances.js",
|
||||
"test": "jest"
|
||||
},
|
||||
"keywords": [
|
||||
"opp",
|
||||
"blockchain",
|
||||
"eosio",
|
||||
"parser"
|
||||
],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@types/jest": "^29.5.0",
|
||||
"@types/node": "^20.10.0",
|
||||
"jest": "^29.7.0",
|
||||
"ts-jest": "^29.1.0",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.3.0"
|
||||
}
|
||||
}
|
||||
253
src/compare_balances.ts
Normal file
253
src/compare_balances.ts
Normal file
@@ -0,0 +1,253 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import { BalanceOutput, StakerOutput, PurchaserOutput } from './types';
|
||||
|
||||
// Tolerance for comparing our computed values against the coworker's table.
// The coworker's table truncates display to 4-5 significant figures, so values in
// the thousands can differ by ~1.0 due to display rounding alone.
// NOTE(review): only an absolute tolerance is applied (see matches()); no
// relative tolerance is implemented despite the field sizes varying widely.
// Overridable via the TOLERANCE environment variable (parsed as a float).
const ABS_TOLERANCE = parseFloat(process.env.TOLERANCE ?? '1.5');
// Wei per ETH (10^18) — converts bigint wei amounts to floating-point ETH.
const WEI = 1e18;
|
||||
// One parsed row of the coworker's box-drawing table. Numeric fields are
// display-precision ETH/pretoken values (not wei). See mergeOurData() for how
// our parsed balances are mapped onto these same columns.
interface CoworkerRow {
  account: string;        // Ethereum address (lowercased), or 'Totals' for the totals row
  principal: number;      // combined principal (staked + PRE spent, per mergeOurData)
  staked: number;         // staker principal
  shares: number;         // combined share balance
  preSpent: number;       // ETH spent by the purchaser side
  prePurchased: number;   // pretokens bought directly
  refundValue: number;    // total shares valued at the latest index
  pretokenYield: number;  // pretokens credited via yield distribution
}
||||
|
||||
interface MergedRow {
|
||||
principal: number;
|
||||
staked: number;
|
||||
shares: number;
|
||||
preSpent: number;
|
||||
prePurchased: number;
|
||||
refundValue: number;
|
||||
pretokenYield: number;
|
||||
}
|
||||
|
||||
// Parse the coworker's box-drawing table from the temp file
|
||||
function parseCoworkerTable(filePath: string): { rows: CoworkerRow[]; totals: CoworkerRow } {
|
||||
const raw = fs.readFileSync(filePath, 'utf-8');
|
||||
const lines = raw.split('\n');
|
||||
|
||||
const rows: CoworkerRow[] = [];
|
||||
let totals: CoworkerRow | null = null;
|
||||
|
||||
for (const line of lines) {
|
||||
// Skip border lines (┌─┬─┐, ├─┼─┤, └─┴─┘) and empty lines
|
||||
if (!line.includes('│') || line.trim() === '') continue;
|
||||
|
||||
// Split by │ and trim each cell
|
||||
const cells = line.split('│').map((c) => c.trim()).filter((c) => c !== '');
|
||||
|
||||
// Skip header row
|
||||
if (cells[0] === 'Account') continue;
|
||||
|
||||
// Check if this is the totals row
|
||||
if (cells[0] === 'Totals') {
|
||||
totals = {
|
||||
account: 'Totals',
|
||||
principal: parseFloat(cells[1]),
|
||||
staked: parseFloat(cells[2]),
|
||||
shares: parseFloat(cells[3]),
|
||||
preSpent: parseFloat(cells[4]),
|
||||
prePurchased: parseFloat(cells[5]),
|
||||
refundValue: parseFloat(cells[6]),
|
||||
pretokenYield: parseFloat(cells[7]),
|
||||
};
|
||||
continue;
|
||||
}
|
||||
|
||||
// Regular account row - must start with 0x
|
||||
if (!cells[0].startsWith('0x')) continue;
|
||||
|
||||
rows.push({
|
||||
account: cells[0].toLowerCase(),
|
||||
principal: parseFloat(cells[1]),
|
||||
staked: parseFloat(cells[2]),
|
||||
shares: parseFloat(cells[3]),
|
||||
preSpent: parseFloat(cells[4]),
|
||||
prePurchased: parseFloat(cells[5]),
|
||||
refundValue: parseFloat(cells[6]),
|
||||
pretokenYield: parseFloat(cells[7]),
|
||||
});
|
||||
}
|
||||
|
||||
if (!totals) {
|
||||
throw new Error('No totals row found in coworker data');
|
||||
}
|
||||
|
||||
return { rows, totals };
|
||||
}
|
||||
|
||||
// Merge staker + purchaser data for an address into a single comparable row
|
||||
function mergeOurData(
|
||||
address: string,
|
||||
staker: StakerOutput | undefined,
|
||||
purchaser: PurchaserOutput | undefined,
|
||||
latestIndex: bigint,
|
||||
): MergedRow {
|
||||
const stakerPrincipal = staker ? Number(BigInt(staker.principal)) / WEI : 0;
|
||||
const purchaserPrincipal = purchaser ? Number(BigInt(purchaser.principal)) / WEI : 0;
|
||||
|
||||
const stakerShares = staker ? Number(BigInt(staker.shares)) / WEI : 0;
|
||||
const purchaserShares = purchaser ? Number(BigInt(purchaser.shares)) / WEI : 0;
|
||||
|
||||
const stakerYield = staker ? Number(BigInt(staker.pretokenYield)) / WEI : 0;
|
||||
const purchaserYield = purchaser ? Number(BigInt(purchaser.yield)) / WEI : 0;
|
||||
|
||||
const purchaserPretokens = purchaser ? Number(BigInt(purchaser.pretokens)) / WEI : 0;
|
||||
|
||||
// Refund value = (totalShares * latestIndex) / 1e27
|
||||
const totalShares = BigInt(staker?.shares ?? '0') + BigInt(purchaser?.shares ?? '0');
|
||||
const refundValue = Number((totalShares * latestIndex) / BigInt('1000000000000000000000000000')) / WEI;
|
||||
|
||||
return {
|
||||
principal: stakerPrincipal + purchaserPrincipal,
|
||||
staked: stakerPrincipal,
|
||||
shares: stakerShares + purchaserShares,
|
||||
preSpent: purchaserPrincipal,
|
||||
prePurchased: purchaserPretokens,
|
||||
refundValue,
|
||||
pretokenYield: stakerYield + purchaserYield,
|
||||
};
|
||||
}
|
||||
|
||||
// Check if two values match within tolerance
|
||||
function matches(ours: number, theirs: number): boolean {
|
||||
return Math.abs(ours - theirs) <= ABS_TOLERANCE;
|
||||
}
|
||||
|
||||
// Format a number for display, matching coworker's precision style
|
||||
function fmt(n: number): string {
|
||||
if (n === 0) return '0';
|
||||
if (Math.abs(n) >= 100) return n.toFixed(0);
|
||||
if (Math.abs(n) >= 1) return n.toFixed(4);
|
||||
return n.toFixed(4);
|
||||
}
|
||||
|
||||
// Entry point: load the coworker's table (./temp) and our parsed balances
// (balances_output.json), compare every account field-by-field plus the
// totals row, print a report, and exit with status 1 on any mismatch.
function main(): void {
  const tempPath = path.join(__dirname, '..', 'temp');
  const balancesPath = path.join(__dirname, '..', 'balances_output.json');

  if (!fs.existsSync(tempPath)) {
    console.error('Missing coworker data file: ./temp');
    process.exit(1);
  }
  if (!fs.existsSync(balancesPath)) {
    console.error('Missing balances_output.json - run `npm run parse` first');
    process.exit(1);
  }

  // Load data
  const { rows: coworkerRows, totals: coworkerTotals } = parseCoworkerTable(tempPath);
  const balances: BalanceOutput = JSON.parse(fs.readFileSync(balancesPath, 'utf-8'));
  const latestIndex = BigInt(balances.metadata.latestIndex);

  // Collect all unique addresses from BOTH data sets so entries missing from
  // either side are surfaced below.
  const allAddresses = new Set<string>();
  for (const row of coworkerRows) allAddresses.add(row.account);
  for (const addr of Object.keys(balances.stakers)) allAddresses.add(addr);
  for (const addr of Object.keys(balances.purchasers)) allAddresses.add(addr);

  // Field order matches the coworker's table columns.
  const fields = ['principal', 'staked', 'shares', 'preSpent', 'prePurchased', 'refundValue', 'pretokenYield'] as const;
  const fieldLabels: Record<typeof fields[number], string> = {
    principal: 'Principal',
    staked: 'Staked',
    shares: 'Shares',
    preSpent: 'PRE Spent',
    prePurchased: 'PRE Purchased',
    refundValue: 'Refund Value',
    pretokenYield: 'Pretoken Yield',
  };

  let mismatches = 0;
  let totalChecked = 0;

  // Our totals accumulator (summed while iterating accounts below).
  const ourTotals: MergedRow = {
    principal: 0, staked: 0, shares: 0, preSpent: 0,
    prePurchased: 0, refundValue: 0, pretokenYield: 0,
  };

  console.log('=== Per-Account Comparison ===\n');

  for (const addr of [...allAddresses].sort()) {
    const coworkerRow = coworkerRows.find((r) => r.account === addr);
    const ours = mergeOurData(
      addr,
      balances.stakers[addr],
      balances.purchasers[addr],
      latestIndex,
    );

    // Accumulate our totals (done for every address, even ones the coworker
    // is missing, so ourTotals reflects our full data set).
    for (const f of fields) ourTotals[f] += ours[f];

    if (!coworkerRow) {
      console.log(`${addr}: MISSING from coworker data`);
      mismatches++;
      continue;
    }

    // Compare each field
    const diffs: string[] = [];
    for (const f of fields) {
      totalChecked++;
      const ourVal = ours[f];
      const theirVal = coworkerRow[f];
      if (!matches(ourVal, theirVal)) {
        diffs.push(`  ${fieldLabels[f]}: ours=${fmt(ourVal)} theirs=${fmt(theirVal)} diff=${fmt(ourVal - theirVal)}`);
        mismatches++;
      }
    }

    if (diffs.length > 0) {
      console.log(`${addr}: MISMATCH`);
      for (const d of diffs) console.log(d);
    }
  }

  // Check for addresses in coworker data but not in ours
  for (const row of coworkerRows) {
    if (!balances.stakers[row.account] && !balances.purchasers[row.account]) {
      console.log(`${row.account}: MISSING from our data`);
      mismatches++;
    }
  }

  // Totals comparison
  console.log('\n=== Totals Comparison ===\n');
  console.log(`${'Field'.padEnd(20)} ${'Ours'.padStart(12)} ${'Theirs'.padStart(12)} ${'Diff'.padStart(12)} ${'Status'.padStart(8)}`);
  console.log('-'.repeat(66));

  for (const f of fields) {
    const ourVal = ourTotals[f];
    const theirVal = coworkerTotals[f];
    const diff = ourVal - theirVal;
    const status = matches(ourVal, theirVal) ? 'OK' : 'MISMATCH';
    if (status === 'MISMATCH') mismatches++;
    console.log(
      `${fieldLabels[f].padEnd(20)} ${fmt(ourVal).padStart(12)} ${fmt(theirVal).padStart(12)} ${fmt(diff).padStart(12)} ${status.padStart(8)}`,
    );
  }

  // Summary
  console.log(`\n=== Summary ===`);
  console.log(`Accounts compared: ${allAddresses.size}`);
  console.log(`Fields checked: ${totalChecked}`);
  console.log(`Tolerance: ${ABS_TOLERANCE} (override with TOLERANCE env var)`);

  if (mismatches === 0) {
    console.log('\nAll values match within tolerance!');
  } else {
    console.log(`\n${mismatches} mismatch(es) found.`);
    process.exit(1);
  }
}

main();
||||
1164
src/parse_balances.test.ts
Normal file
1164
src/parse_balances.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
366
src/parse_balances.ts
Normal file
366
src/parse_balances.ts
Normal file
@@ -0,0 +1,366 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import {
|
||||
OPPMessage,
|
||||
StakerState,
|
||||
PurchaserState,
|
||||
BalanceOutput,
|
||||
StakerOutput,
|
||||
PurchaserOutput,
|
||||
ASSERTION_TYPES,
|
||||
BigIntSerialized,
|
||||
} from './types';
|
||||
|
||||
// Constants
// Share indexes are fixed-point integers scaled by 1e27 (1.0 == 1e27).
export const INDEX_PRECISION = BigInt('1000000000000000000000000000'); // 1e27
// Yield amounts are held internally scaled by 1e27 to limit truncation during
// proportional distribution; divided back out in generateOutput().
export const YIELD_PRECISION = BigInt('1000000000000000000000000000'); // 1e27 - for scaled yield arithmetic
|
||||
// Parse BigInt from serialized format
|
||||
export function parseBigInt(val: any): bigint {
|
||||
if (val === null || val === undefined) {
|
||||
return BigInt(0);
|
||||
}
|
||||
if (typeof val === 'object' && val.__bigint__) {
|
||||
return BigInt(val.__bigint__);
|
||||
}
|
||||
if (typeof val === 'bigint') {
|
||||
return val;
|
||||
}
|
||||
if (typeof val === 'string') {
|
||||
return BigInt(val);
|
||||
}
|
||||
if (typeof val === 'number') {
|
||||
return BigInt(val);
|
||||
}
|
||||
return BigInt(0);
|
||||
}
|
||||
|
||||
// Main parser class
//
// Replays OPP assertion messages in order and accumulates per-address staker
// and purchaser balances using bigint arithmetic. Yield amounts are held
// internally scaled by YIELD_PRECISION (1e27) and divided back out in
// generateOutput(). Processing is order-dependent: yield distribution (3006)
// splits pretokens proportionally to the NET principal accumulated from the
// 3001/3002/3004 events seen so far.
export class OPPBalanceParser {
  // Per-address accumulators, keyed by lowercased Ethereum address.
  private stakers: Map<string, StakerState> = new Map();
  private purchasers: Map<string, PurchaserState> = new Map();
  private latestIndex: bigint = INDEX_PRECISION; // Default to 1e27 (1.0)
  private lastMessageId: string = ''; // messageID of the last structurally valid message
  private lastTimestamp: string = ''; // timestamp of the last structurally valid message
  private totalMessages: number = 0; // count of structurally valid messages processed
  private totalPrincipal: bigint = BigInt(0); // Track total NET principal for yield distribution (stakers + liq purchasers)
  private yieldDust: bigint = BigInt(0); // Track rounding errors from yield distribution
  // NOTE(review): yieldDust is emitted in metadata but is never incremented
  // anywhere in this class — confirm whether dust tracking was meant to be
  // wired into the 3006 distribution loop below.

  // Get or create staker state (addresses normalized to lowercase).
  private getStaker(address: string): StakerState {
    const normalized = address.toLowerCase();
    if (!this.stakers.has(normalized)) {
      this.stakers.set(normalized, {
        totalShares: BigInt(0),
        totalPrincipalIn: BigInt(0),
        totalAmountOut: BigInt(0),
        yieldClaimed: BigInt(0),
      });
    }
    return this.stakers.get(normalized)!;
  }

  // Get or create purchaser state (addresses normalized to lowercase).
  private getPurchaser(address: string): PurchaserState {
    const normalized = address.toLowerCase();
    if (!this.purchasers.has(normalized)) {
      this.purchasers.set(normalized, {
        pretokens: BigInt(0),
        liqShares: BigInt(0),
        liqPrincipalIn: BigInt(0),
        yieldClaimed: BigInt(0),
      });
    }
    return this.purchasers.get(normalized)!;
  }

  // Process a single assertion.
  // `allAssertions` is the full assertion list of the containing message —
  // used to detect paired 3005/3006 events inside the same message.
  private processAssertion(
    assertion: { assertionType: number; args: Record<string, any> },
    allAssertions: Array<{ assertionType: number; args: Record<string, any> }>
  ): void {
    const { assertionType, args } = assertion;

    switch (assertionType) {
      case ASSERTION_TYPES.STAKE: {
        // 3001: Stake - staker, principal, shares, index
        const staker = this.getStaker(args.staker);
        const shares = parseBigInt(args.shares);
        const principal = parseBigInt(args.principal);
        const index = parseBigInt(args.index);

        staker.totalShares += shares;
        staker.totalPrincipalIn += principal;
        this.totalPrincipal += principal; // Track for yield distribution

        // Remember the most recent non-zero share index seen on any event.
        if (index > BigInt(0)) {
          this.latestIndex = index;
        }
        break;
      }

      case ASSERTION_TYPES.UNSTAKE: {
        // 3002: Unstake - staker, amount, shares, index, tokenId
        const staker = this.getStaker(args.staker);
        const shares = parseBigInt(args.shares);
        const amount = parseBigInt(args.amount);
        const index = parseBigInt(args.index);

        staker.totalShares -= shares;
        staker.totalAmountOut += amount;
        this.totalPrincipal -= amount; // Reduce total principal for yield distribution

        if (index > BigInt(0)) {
          this.latestIndex = index;
        }
        break;
      }

      case ASSERTION_TYPES.LIQ_PRETOKEN_PURCHASE: {
        // 3004: Liq Pretoken Purchase - purchaser, principal, shares, index
        const purchaser = this.getPurchaser(args.purchaser);
        const shares = parseBigInt(args.shares);
        const principal = parseBigInt(args.principal);
        const index = parseBigInt(args.index);

        purchaser.liqShares += shares;
        purchaser.liqPrincipalIn += principal;
        this.totalPrincipal += principal; // Track for yield distribution

        if (index > BigInt(0)) {
          this.latestIndex = index;
        }
        break;
      }

      case ASSERTION_TYPES.PRETOKEN_PURCHASE: {
        // 3005: Pretoken Purchase - buyer, ethIn, usdValue, pretokensOut
        // Skip if paired with 3006 (yield event) - those pretokens are for distribution, not direct purchase
        const isPairedWithYield = allAssertions.some(
          (a) => a.assertionType === ASSERTION_TYPES.YIELD_PRETOKEN_PURCHASE
        );

        if (!isPairedWithYield) {
          const purchaser = this.getPurchaser(args.buyer);
          const pretokensOut = parseBigInt(args.pretokensOut);
          purchaser.pretokens += pretokensOut;
        }
        break;
      }

      case ASSERTION_TYPES.YIELD_PRETOKEN_PURCHASE: {
        // 3006: Yield Pretoken Purchase - purchaser, principal, index
        // Find paired 3005 event in same assertions array and use pretokensOut
        const index = parseBigInt(args.index);

        // Find paired 3005 event to get actual pretokens to distribute
        const pairedPretoken = allAssertions.find(
          (a) => a.assertionType === ASSERTION_TYPES.PRETOKEN_PURCHASE
        );

        // Distribution only happens when there is principal to pro-rate over.
        if (pairedPretoken && this.totalPrincipal > BigInt(0)) {
          const pretokensToDistribute = parseBigInt(pairedPretoken.args.pretokensOut);

          // Scale up pretokens before distribution to minimize precision loss
          // We multiply by YIELD_PRECISION first, then divide by totalPrincipal
          // This delays truncation and preserves more precision
          const scaledPretokens = pretokensToDistribute * YIELD_PRECISION;

          // Distribute to stakers based on NET principal (after unstakes)
          for (const [addr, state] of this.stakers.entries()) {
            const netPrincipal = state.totalPrincipalIn - state.totalAmountOut;
            if (netPrincipal > BigInt(0)) {
              // share = (netPrincipal / totalPrincipal) * pretokens * PRECISION
              const scaledShare = (netPrincipal * scaledPretokens) / this.totalPrincipal;
              state.yieldClaimed += scaledShare;
            }
          }

          // Distribute to liq purchasers based on their principal
          for (const [addr, state] of this.purchasers.entries()) {
            if (state.liqPrincipalIn > BigInt(0)) {
              const scaledShare = (state.liqPrincipalIn * scaledPretokens) / this.totalPrincipal;
              state.yieldClaimed += scaledShare;
            }
          }

          // Note: Dust is now tracked in scaled units, will be divided at output
          // Theoretical total = scaledPretokens, actual distributed is sum of shares
          // Dust per event is at most (numUsers - 1) units in scaled space
        }

        if (index > BigInt(0)) {
          this.latestIndex = index;
        }
        break;
      }

      // Ignore bonding events (2001, 2002)
      default:
        break;
    }
  }

  // Process all messages in order, skipping malformed entries. Updates the
  // last-seen message id/timestamp and the valid-message count.
  public processMessages(messages: any[]): void {
    let validMessages = 0;

    for (const message of messages) {
      // Skip messages without proper structure
      if (!message?.header?.messageID || !message?.payload?.assertions) {
        console.warn('Skipping message:', JSON.stringify(message).slice(0, 100));
        continue;
      }

      validMessages++;
      this.lastMessageId = message.header.messageID;
      // Timestamps arrive either as {__bigint__: '…'} or a plain value.
      this.lastTimestamp = typeof message.header.timestamp === 'object'
        ? message.header.timestamp.__bigint__
        : String(message.header.timestamp);

      for (const assertion of message.payload.assertions) {
        this.processAssertion(assertion, message.payload.assertions);
      }
    }

    this.totalMessages = validMessages;
  }

  // Generate the serializable balance snapshot. Zero-balance accounts are
  // dropped; all bigints are emitted as decimal strings.
  public generateOutput(): BalanceOutput {
    const stakersOutput: Record<string, StakerOutput> = {};
    const purchasersOutput: Record<string, PurchaserOutput> = {};

    // Process stakers
    for (const [address, state] of this.stakers.entries()) {
      // Skip if no remaining balance and no yield claimed
      if (state.totalShares === BigInt(0) && state.yieldClaimed === BigInt(0)) {
        continue;
      }

      const netPrincipal = state.totalPrincipalIn - state.totalAmountOut;
      // Divide by YIELD_PRECISION to convert from scaled internal representation
      const actualYield = state.yieldClaimed / YIELD_PRECISION;

      stakersOutput[address] = {
        principal: netPrincipal.toString(),
        shares: state.totalShares.toString(),
        pretokenYield: actualYield.toString(),
      };
    }

    // Process purchasers
    for (const [address, state] of this.purchasers.entries()) {
      // Skip if no remaining balance
      if (state.pretokens === BigInt(0) &&
          state.liqShares === BigInt(0) &&
          state.yieldClaimed === BigInt(0)) {
        continue;
      }

      // Divide by YIELD_PRECISION to convert from scaled internal representation
      const actualYield = state.yieldClaimed / YIELD_PRECISION;
      const totalPretokens = state.pretokens + actualYield;

      purchasersOutput[address] = {
        pretokens: state.pretokens.toString(),
        shares: state.liqShares.toString(),
        principal: state.liqPrincipalIn.toString(),
        yield: actualYield.toString(),
        totalPretokens: totalPretokens.toString(),
      };
    }

    // Sum totals from final output to correctly reflect net values
    let totalLiqEthStaked = BigInt(0);
    for (const staker of Object.values(stakersOutput)) {
      totalLiqEthStaked += BigInt(staker.principal);
    }
    let totalLiqEthPurchased = BigInt(0);
    for (const purchaser of Object.values(purchasersOutput)) {
      totalLiqEthPurchased += BigInt(purchaser.principal);
    }

    return {
      stakers: stakersOutput,
      purchasers: purchasersOutput,
      metadata: {
        lastMessageId: this.lastMessageId,
        lastTimestamp: this.lastTimestamp,
        latestIndex: this.latestIndex.toString(),
        totalMessages: this.totalMessages,
        generatedAt: new Date().toISOString(),
        yieldDust: this.yieldDust.toString(),
        totalLiqEthStaked: totalLiqEthStaked.toString(),
        totalLiqEthPurchased: totalLiqEthPurchased.toString(),
      },
    };
  }

  // Validation checks: returns human-readable error strings for any account
  // whose accumulated balances went negative (indicates bad or missing events).
  public validate(): string[] {
    const errors: string[] = [];

    // Check for negative shares
    for (const [address, state] of this.stakers.entries()) {
      if (state.totalShares < BigInt(0)) {
        errors.push(`Staker ${address} has negative shares: ${state.totalShares.toString()}`);
      }
    }

    for (const [address, state] of this.purchasers.entries()) {
      if (state.liqShares < BigInt(0)) {
        errors.push(`Purchaser ${address} has negative liq shares: ${state.liqShares.toString()}`);
      }
      if (state.pretokens < BigInt(0)) {
        errors.push(`Purchaser ${address} has negative pretokens: ${state.pretokens.toString()}`);
      }
    }

    return errors;
  }
}
||||
|
||||
// Main execution
// Loads the raw event dump, replays it through the parser, reports (but does
// not abort on) validation errors, and writes the balance snapshot to disk.
async function main(): Promise<void> {
  const inputPath = path.join(__dirname, '..', 'opp_events_current.json');
  const outputPath = path.join(__dirname, '..', 'balances_output.json');

  console.log('Loading OPP events from:', inputPath);

  // Load and parse JSON
  const rawData = fs.readFileSync(inputPath, 'utf-8');
  const messages: OPPMessage[] = JSON.parse(rawData);

  console.log(`Loaded ${messages.length} messages`);

  // Process messages
  const parser = new OPPBalanceParser();
  parser.processMessages(messages);

  // Validate — errors are logged for inspection but the output is still
  // written so partial results can be examined.
  const errors = parser.validate();
  if (errors.length > 0) {
    console.error('Validation errors:');
    for (const error of errors) {
      console.error('  -', error);
    }
  }

  // Generate output
  const output = parser.generateOutput();

  // Write output
  fs.writeFileSync(outputPath, JSON.stringify(output, null, 2));

  console.log(`Output written to: ${outputPath}`);
  console.log(`Stakers: ${Object.keys(output.stakers).length}`);
  console.log(`Purchasers: ${Object.keys(output.purchasers).length}`);
  console.log(`Latest index: ${output.metadata.latestIndex}`);
  console.log(`Last message: ${output.metadata.lastMessageId}`);
}

// Surface any unhandled failure with a non-zero exit code.
main().catch((error) => {
  console.error('Error:', error);
  process.exit(1);
});
||||
462
src/relay.ts
Normal file
462
src/relay.ts
Normal file
@@ -0,0 +1,462 @@
|
||||
#!/usr/bin/env ts-node
|
||||
/**
|
||||
* OPP Operator Relay
|
||||
*
|
||||
* Automates the Ethereum → Wire depot OPP message flow:
|
||||
* 1. Reads parsed Ethereum outpost events (OPP assertions)
|
||||
* 2. Encodes them into Wire depot wire-format payloads
|
||||
* 3. Submits epoch chains from multiple operators (4-of-7 consensus)
|
||||
* 4. Uploads messages via uploadmsgs
|
||||
* 5. Cranks to process READY messages
|
||||
*
|
||||
* Usage:
|
||||
* WIRE_NODE=http://192.168.50.183:8888 \
|
||||
* CLIO=/path/to/clio \
|
||||
* ts-node src/relay.ts [--events opp_events_current.json] [--epoch-size 10]
|
||||
*/
|
||||
|
||||
import { execSync } from "child_process";
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
import * as crypto from "crypto";
|
||||
import {
|
||||
OPPMessage,
|
||||
OPPAssertion,
|
||||
BigIntSerialized,
|
||||
ASSERTION_TYPES,
|
||||
} from "./types";
|
||||
|
||||
// ── Config ───────────────────────────────────────────────────────────────────

// Wire chain API endpoint and clio binary path, overridable via environment.
const WIRE_NODE = process.env.WIRE_NODE || "http://localhost:8888";
const CLIO = process.env.CLIO || "clio";
// Number of depot messages bundled per relayed epoch.
const EPOCH_SIZE = parseInt(process.env.EPOCH_SIZE || "10", 10);

// 7 batch operators registered on the testnet
const OPERATORS = [
  "testoperator",
  "testoper2345",
  "testoper345a",
  "testoperaaaa",
  "testoperbbbb",
  "testopercccc",
  "testoperdddd",
];
// Minimum operators whose epoch chains must agree (4-of-7 consensus).
const CONSENSUS_QUORUM = 4;

// Depot assertion type IDs (matching depot.types.hpp)
const DEPOT_ASSERTION = {
  BALANCE_SHEET: 0xaa00,
  STAKE_UPDATE: 0xee00,
  YIELD_REWARD: 0xee01,
  WIRE_PURCHASE: 0xee02,
  OPERATOR_REGISTRATION: 0xee03,
  CHALLENGE_RESPONSE: 0xee04,
  SLASH_OPERATOR: 0xee05,
};
||||
|
||||
// ── Helpers ──────────────────────────────────────────────────────────────────
|
||||
|
||||
function toBigInt(v: string | BigIntSerialized | number): bigint {
|
||||
if (typeof v === "number") return BigInt(v);
|
||||
if (typeof v === "string") return BigInt(v);
|
||||
return BigInt(v.__bigint__);
|
||||
}
|
||||
|
||||
function clio(args: string): string {
|
||||
const cmd = `${CLIO} -u ${WIRE_NODE} ${args}`;
|
||||
try {
|
||||
return execSync(cmd, {
|
||||
encoding: "utf-8",
|
||||
timeout: 30000,
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
});
|
||||
} catch (e: any) {
|
||||
console.error(`clio error: ${e.stderr || e.message}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
function clioJson(args: string): any {
|
||||
const out = clio(args);
|
||||
try {
|
||||
return JSON.parse(out);
|
||||
} catch {
|
||||
return out;
|
||||
}
|
||||
}
|
||||
|
||||
function getDepotState(): any {
|
||||
const resp = clioJson("get table sysio.depot sysio.depot depotstate");
|
||||
return resp.rows?.[0];
|
||||
}
|
||||
|
||||
// ── Wire Format Encoding ─────────────────────────────────────────────────────
|
||||
|
||||
function writeBE(buf: Buffer, offset: number, value: bigint, bytes: number) {
|
||||
for (let i = bytes - 1; i >= 0; i--) {
|
||||
buf[offset + i] = Number(value & 0xffn);
|
||||
value >>= 8n;
|
||||
}
|
||||
}
|
||||
|
||||
function nameToUint64(name: string): bigint {
|
||||
const charmap = ".12345abcdefghijklmnopqrstuvwxyz";
|
||||
let n = 0n;
|
||||
for (let i = 0; i < Math.min(name.length, 13); i++) {
|
||||
const c = BigInt(charmap.indexOf(name[i]) || 0);
|
||||
if (i < 12) {
|
||||
n |= (c & 0x1fn) << BigInt(64 - 5 * (i + 1));
|
||||
} else {
|
||||
n |= c & 0x0fn;
|
||||
}
|
||||
}
|
||||
return n;
|
||||
}
|
||||
|
||||
function symbolCodeRaw(sym: string): bigint {
|
||||
let raw = 0n;
|
||||
for (let i = 0; i < Math.min(sym.length, 7); i++) {
|
||||
raw |= BigInt(sym.charCodeAt(i)) << BigInt(8 * i);
|
||||
}
|
||||
return raw;
|
||||
}
|
||||
|
||||
/**
|
||||
* Map an Ethereum OPP assertion to a depot wire-format message.
|
||||
* Returns: { assertionType: number, payload: Buffer } or null if unmappable.
|
||||
*/
|
||||
function mapAssertionToDepot(
|
||||
assertion: OPPAssertion
|
||||
): { assertionType: number; payload: Buffer } | null {
|
||||
switch (assertion.assertionType) {
|
||||
case ASSERTION_TYPES.STAKE:
|
||||
case ASSERTION_TYPES.UNSTAKE: {
|
||||
// Map to stake_update (0xEE00)
|
||||
// Payload: sym_code(8) + delta(8) + wire_account(8) + direction(1)
|
||||
const ethSym = symbolCodeRaw("ETH");
|
||||
const isStake = assertion.assertionType === ASSERTION_TYPES.STAKE;
|
||||
// Ethereum amounts are in Wei (18 decimals). Depot ETH reserve uses 8 decimals.
|
||||
// Scale: Wei / 10^10 = 8-decimal ETH
|
||||
const rawAmount = toBigInt(
|
||||
assertion.args.principal || assertion.args.amount || "0"
|
||||
);
|
||||
const amount = rawAmount / 10000000000n; // Wei -> 8-decimal ETH
|
||||
if (amount <= 0n) return null;
|
||||
// The staker field is an Ethereum address, but we need a Wire account name.
|
||||
// For the relay, we map it to a deterministic Wire name (first 12 chars of hex).
|
||||
// In production, this would use the user's registered Wire account.
|
||||
const staker = String(assertion.args.staker || "");
|
||||
const wireAccount = nameToUint64("testoperator"); // Default for testnet
|
||||
|
||||
const buf = Buffer.alloc(25);
|
||||
writeBE(buf, 0, ethSym, 8);
|
||||
writeBE(buf, 8, amount, 8);
|
||||
writeBE(buf, 16, wireAccount, 8);
|
||||
buf[24] = isStake ? 0 : 1;
|
||||
|
||||
return { assertionType: DEPOT_ASSERTION.STAKE_UPDATE, payload: buf };
|
||||
}
|
||||
|
||||
case ASSERTION_TYPES.PRETOKEN_PURCHASE: {
|
||||
// Map to wire_purchase (0xEE02)
|
||||
// Payload: source_sym(8) + source_amount(8) + buyer_account(8)
|
||||
const ethSym = symbolCodeRaw("ETH");
|
||||
const rawEthIn = toBigInt(assertion.args.ethIn || "0");
|
||||
const ethIn = rawEthIn / 10000000000n; // Wei -> 8-decimal ETH
|
||||
if (ethIn <= 0n) return null;
|
||||
const buyer = nameToUint64("testoperator"); // Default for testnet
|
||||
|
||||
const buf = Buffer.alloc(24);
|
||||
writeBE(buf, 0, ethSym, 8);
|
||||
writeBE(buf, 8, ethIn, 8);
|
||||
writeBE(buf, 16, buyer, 8);
|
||||
|
||||
return { assertionType: DEPOT_ASSERTION.WIRE_PURCHASE, payload: buf };
|
||||
}
|
||||
|
||||
case ASSERTION_TYPES.LIQ_PRETOKEN_PURCHASE:
|
||||
case ASSERTION_TYPES.YIELD_PRETOKEN_PURCHASE: {
|
||||
// Map to yield_reward (0xEE01)
|
||||
const ethSym = symbolCodeRaw("ETH");
|
||||
const rawPrincipal = toBigInt(assertion.args.principal || "0");
|
||||
const principal = rawPrincipal / 10000000000n; // Wei -> 8-decimal ETH
|
||||
if (principal <= 0n) return null;
|
||||
const beneficiary = nameToUint64("testoperator");
|
||||
|
||||
const buf = Buffer.alloc(24);
|
||||
writeBE(buf, 0, ethSym, 8);
|
||||
writeBE(buf, 8, principal, 8);
|
||||
writeBE(buf, 16, beneficiary, 8);
|
||||
|
||||
return { assertionType: DEPOT_ASSERTION.YIELD_REWARD, payload: buf };
|
||||
}
|
||||
|
||||
case ASSERTION_TYPES.BONDED_ACTOR:
|
||||
case ASSERTION_TYPES.UNBONDED_ACTOR: {
|
||||
// Map to operator_registration (0xEE03)
|
||||
// Payload: wire_account(8) + op_type(1) + action(1) + secp_key(33) + ed_key(32) = 75
|
||||
const isBond = assertion.assertionType === ASSERTION_TYPES.BONDED_ACTOR;
|
||||
const wireAccount = nameToUint64("testoperator");
|
||||
|
||||
const buf = Buffer.alloc(75);
|
||||
writeBE(buf, 0, wireAccount, 8);
|
||||
buf[8] = 1; // operator_type_batch
|
||||
buf[9] = isBond ? 0 : 1; // 0=register, 1=deregister
|
||||
// Dummy keys for testnet
|
||||
buf[10] = 0x02; // compressed secp prefix
|
||||
buf[42] = 0xff;
|
||||
buf[43] = 0x01; // ed25519 first byte
|
||||
|
||||
return {
|
||||
assertionType: DEPOT_ASSERTION.OPERATOR_REGISTRATION,
|
||||
payload: buf,
|
||||
};
|
||||
}
|
||||
|
||||
default:
|
||||
// Unknown Ethereum assertion type — pass through as balance_sheet stub
|
||||
// so it doesn't block the pipeline. Unknown types in the depot are
|
||||
// silently skipped.
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode depot messages into the uploadmsgs wire format:
|
||||
* [assertion_type(2 BE) | payload_len(4 BE) | payload(N)] repeated
|
||||
*/
|
||||
function encodeMessages(
|
||||
msgs: { assertionType: number; payload: Buffer }[]
|
||||
): Buffer {
|
||||
const parts: Buffer[] = [];
|
||||
for (const msg of msgs) {
|
||||
const header = Buffer.alloc(6);
|
||||
header.writeUInt16BE(msg.assertionType, 0);
|
||||
header.writeUInt32BE(msg.payload.length, 2);
|
||||
parts.push(header, msg.payload);
|
||||
}
|
||||
return Buffer.concat(parts);
|
||||
}
|
||||
|
||||
// ── Relay Pipeline ───────────────────────────────────────────────────────────
|
||||
|
||||
interface RelayResult {
|
||||
epoch: number;
|
||||
messagesProcessed: number;
|
||||
submitchainTxs: string[];
|
||||
uploadmsgsTx: string;
|
||||
crankTx: string;
|
||||
reservesBefore: any;
|
||||
reservesAfter: any;
|
||||
}
|
||||
|
||||
/**
 * Relay one epoch worth of depot messages onto the Wire chain.
 *
 * Drives a three-step on-chain sequence, each step shelled out through the
 * `clio` CLI helper:
 *   1. `submitchain` — collect epoch-hash submissions from operators until
 *      CONSENSUS_QUORUM of them succeed (or the operator list is exhausted).
 *   2. `uploadmsgs`  — upload the hex-encoded message batch for the epoch.
 *   3. `crank`       — trigger processing of the uploaded messages.
 *
 * The `reserves` table is snapshotted before and after so the caller can
 * diff the effect of the epoch.
 *
 * @param depotMessages  Pre-mapped depot assertions for this epoch.
 * @returns RelayResult with per-step tx ids or "FAILED"/"ALREADY" markers.
 *
 * NOTE(review): when fewer than CONSENSUS_QUORUM submissions succeed this
 * only logs an error and still proceeds to uploadmsgs/crank — confirm that
 * is intentional (presumably the later actions then fail on-chain).
 */
async function relayEpoch(
  depotMessages: { assertionType: number; payload: Buffer }[]
): Promise<RelayResult> {
  // Always re-read state to get the current epoch
  const state = getDepotState();
  const epoch = state.current_epoch;

  console.log(`\n=== Relay Epoch ${epoch} (${depotMessages.length} messages) ===`);

  // Capture reserves before
  const reservesBefore = clioJson("get table sysio.depot 2 reserves");

  // Generate epoch hash from messages.
  // merkleHash is a plain sha256 over the concatenated batch — despite the
  // name it is not a real merkle root; presumably a testnet stand-in. Confirm
  // against the contract's verification logic.
  const messagesBuf = encodeMessages(depotMessages);
  const merkleHash = crypto
    .createHash("sha256")
    .update(messagesBuf)
    .digest("hex");
  // epoch hash = sha256( merkle_root (32 bytes) ++ 32 zero bytes )
  const epochHashInput = Buffer.from(merkleHash + "0".repeat(64), "hex");
  const epochHash = crypto
    .createHash("sha256")
    .update(epochHashInput)
    .digest("hex");

  // Step 1: submitchain from CONSENSUS_QUORUM operators
  const dummySig =
    "0".repeat(128) + "ff"; // 65-byte dummy signature hex
  const prevHash = "0".repeat(64); // no previous-epoch hash tracked here

  const submitTxs: string[] = [];
  let successCount = 0;
  // Walk the operator list until quorum is reached; operators that are not
  // elected for this epoch are skipped without counting against quorum.
  for (let i = 0; i < OPERATORS.length && successCount < CONSENSUS_QUORUM; i++) {
    const op = OPERATORS[i];
    try {
      const result = clio(
        `push action sysio.depot submitchain '{"operator_account":"${op}", "epoch_number":${epoch}, "epoch_hash":"${epochHash}", "prev_epoch_hash":"${prevHash}", "merkle_root":"${merkleHash}", "signature":"${dummySig}"}' -p ${op}@active`
      );
      // Extract the tx id from clio's stdout; keep a short 12-char prefix.
      const txMatch = result.match(/executed transaction: ([a-f0-9]+)/);
      submitTxs.push(txMatch ? txMatch[1].substring(0, 12) : "ok");
      successCount++;
      console.log(`  submitchain from ${op}: OK`);
    } catch (e: any) {
      // e.stderr presumably comes from a child_process exec failure — confirm
      // the shape clio() throws.
      const errMsg = e.stderr || e.message;
      if (errMsg.includes("already submitted")) {
        // Already submitted — skip
        submitTxs.push("ALREADY");
        successCount++; // count as success — vote already recorded
        console.log(`  submitchain from ${op}: already submitted`);
      } else if (errMsg.includes("not elected")) {
        // Not elected for this epoch — try next operator
        continue;
      } else {
        console.error(`  submitchain from ${op}: FAILED`);
        submitTxs.push("FAILED");
      }
    }
  }

  if (successCount < CONSENSUS_QUORUM) {
    // See NOTE(review) in the header: execution continues past this point.
    console.error(`  Only ${successCount}/${CONSENSUS_QUORUM} submissions — cannot proceed`);
  }

  // Step 2: uploadmsgs — the whole batch is sent as one hex blob from the
  // first operator; merkle_proofs is left empty.
  const messagesHex = messagesBuf.toString("hex");
  console.log(`  uploadmsgs (${messagesHex.length / 2} bytes)...`);
  let uploadTx = "";
  try {
    const result = clio(
      `push action sysio.depot uploadmsgs '{"operator_account":"${OPERATORS[0]}", "epoch_number":${epoch}, "messages":"${messagesHex}", "merkle_proofs":""}' -p ${OPERATORS[0]}@active`
    );
    const txMatch = result.match(/executed transaction: ([a-f0-9]+)/);
    uploadTx = txMatch ? txMatch[1] : "ok";
  } catch (e: any) {
    console.error(`  uploadmsgs failed: ${e.message}`);
    uploadTx = "FAILED";
  }

  // Step 3: crank — trigger processing of the uploaded epoch.
  console.log(`  crank from ${OPERATORS[0]}...`);
  let crankTx = "";
  try {
    const result = clio(
      `push action sysio.depot crank '{"operator_account":"${OPERATORS[0]}"}' -p ${OPERATORS[0]}@active`
    );
    const txMatch = result.match(/executed transaction: ([a-f0-9]+)/);
    crankTx = txMatch ? txMatch[1] : "ok";
  } catch (e: any) {
    console.error(`  crank failed: ${e.message}`);
    crankTx = "FAILED";
  }

  // Capture reserves after
  const reservesAfter = clioJson("get table sysio.depot 2 reserves");

  return {
    epoch,
    messagesProcessed: depotMessages.length,
    submitchainTxs: submitTxs,
    uploadmsgsTx: uploadTx,
    crankTx: crankTx,
    reservesBefore: reservesBefore?.rows,
    reservesAfter: reservesAfter?.rows,
  };
}
|
||||
|
||||
// ── Main ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Entry point: load parsed Ethereum OPP events, map them to depot
 * assertions, and relay them to the Wire chain in epoch-sized batches.
 *
 * CLI / environment:
 *   --events=<path>   events JSON file (default: ../opp_events_current.json)
 *   MAX_EPOCHS        max epochs to relay per run (default: 5)
 *
 * If the events file is missing, falls back to relaying a single hard-coded
 * balance_sheet assertion so the pipeline can still be exercised end-to-end.
 * Results are written to relay_results_<timestamp>.json next to src/.
 */
async function main() {
  const eventsFile =
    process.argv.find((a) => a.startsWith("--events="))?.split("=")[1] ||
    path.join(__dirname, "..", "opp_events_current.json");

  console.log("Wire OPP Operator Relay");
  console.log("=======================");
  console.log(`Node: ${WIRE_NODE}`);
  console.log(`Events: ${eventsFile}`);
  console.log(`Epoch size: ${EPOCH_SIZE}`);

  // Check depot state — abort if the contract has never been initialized.
  const state = getDepotState();
  if (!state) {
    console.error("ERROR: Depot not initialized. Run init first.");
    process.exit(1);
  }
  console.log(`Current epoch: ${state.current_epoch}`);
  console.log(`State: ${state.state === 0 ? "ACTIVE" : state.state === 1 ? "CHALLENGE" : "PAUSED"}`);

  // Load events
  if (!fs.existsSync(eventsFile)) {
    console.log("No events file found. Running in balance_sheet relay mode.");
    // Send a single balance_sheet assertion with current ETH state.
    // Payload layout: symbol(8) + eth amount(8) + wire amount(8) + precision(1).
    const ethSym = symbolCodeRaw("ETH");
    const buf = Buffer.alloc(25);
    writeBE(buf, 0, ethSym, 8);
    writeBE(buf, 8, 12000000000000n, 8); // 120000.00000000 ETH
    writeBE(buf, 16, 6000000000000n, 8); // 600000000.0000 WIRE
    buf[24] = 8; // precision

    const result = await relayEpoch([
      { assertionType: DEPOT_ASSERTION.BALANCE_SHEET, payload: buf },
    ]);
    console.log("\nResult:", JSON.stringify(result, null, 2));
    return;
  }

  // Parse events.
  // NOTE(review): JSON.parse output is trusted as OPPMessage[] without
  // validation — this is external input; consider schema-checking it.
  const raw = fs.readFileSync(eventsFile, "utf-8");
  const messages: OPPMessage[] = JSON.parse(raw);
  console.log(`Loaded ${messages.length} OPP messages from Ethereum`);

  // Map Ethereum assertions to depot messages; anything that does not map
  // (or messages with no assertions) is counted as skipped.
  const depotMessages: { assertionType: number; payload: Buffer }[] = [];
  let skipped = 0;

  for (const msg of messages) {
    if (!msg.payload?.assertions) {
      skipped++;
      continue;
    }
    for (const assertion of msg.payload.assertions) {
      const mapped = mapAssertionToDepot(assertion);
      if (mapped) {
        depotMessages.push(mapped);
      } else {
        skipped++;
      }
    }
  }

  console.log(
    `Mapped ${depotMessages.length} assertions to depot format (${skipped} skipped)`
  );

  // Relay in epoch-sized batches, stopping early after MAX_EPOCHS, a failed
  // crank, or any thrown relay error.
  const maxEpochs = parseInt(process.env.MAX_EPOCHS || "5", 10);
  const results: RelayResult[] = [];
  for (let i = 0; i < depotMessages.length && results.length < maxEpochs; i += EPOCH_SIZE) {
    const batch = depotMessages.slice(i, i + EPOCH_SIZE);
    try {
      const result = await relayEpoch(batch);
      results.push(result);
      console.log(
        `  => Epoch ${result.epoch}: ${result.messagesProcessed} msgs, crank: ${result.crankTx === "FAILED" ? "FAILED" : result.crankTx.substring(0, 12)}`
      );
      if (result.crankTx === "FAILED") {
        console.error("  Crank failed — stopping relay.");
        break;
      }
    } catch (e: any) {
      console.error(`  Epoch relay error: ${e.message}`);
      break;
    }
  }

  // Save results with a filesystem-safe ISO timestamp (':' and '.' replaced).
  const outputFile = path.join(
    __dirname,
    "..",
    `relay_results_${new Date().toISOString().replace(/[:.]/g, "-")}.json`
  );
  fs.writeFileSync(outputFile, JSON.stringify(results, null, 2));
  console.log(`\nResults saved to ${outputFile}`);
  console.log(`Total: ${results.length} epochs relayed, ${depotMessages.length} messages`);
}
|
||||
|
||||
main().catch((e) => {
|
||||
console.error("Fatal:", e);
|
||||
process.exit(1);
|
||||
});
|
||||
90
src/types.ts
Normal file
90
src/types.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
// OPP Event Types
|
||||
|
||||
/**
 * JSON-safe wrapper for a bigint value: bigints cannot pass through
 * JSON.stringify directly, so they are carried as `{ __bigint__: "<digits>" }`.
 */
export interface BigIntSerialized {
  __bigint__: string;
}

/** Envelope header of one parsed OPP message. */
export interface OPPHeader {
  messageID: string;
  previousMessageID: string; // ID of the prior message — presumably a hash-chain link; confirm
  payloadHash: string;
  // Numeric header fields may arrive either as plain values or in
  // serialized-bigint form depending on how the parser emitted them.
  timestamp: string | BigIntSerialized;
  messageNumber: number | BigIntSerialized;
}

/** A single typed assertion carried inside an OPP payload. */
export interface OPPAssertion {
  assertionType: number; // numeric ID — see ASSERTION_TYPES below
  assertionName: string;
  // Raw argument map; large numeric args may be serialized bigints.
  args: Record<string, string | BigIntSerialized>;
}

/** Decoded OPP payload: protocol metadata plus its assertions. */
export interface OPPPayload {
  protocolVersion: number;
  encodingFlags: number;
  assertions: OPPAssertion[];
}

/** One complete OPP message: header + payload. */
export interface OPPMessage {
  header: OPPHeader;
  payload: OPPPayload;
}
|
||||
|
||||
// Balance Tracking State
|
||||
|
||||
/**
 * Running per-staker accumulator used by the balance parser.
 * All amounts are bigint to avoid floating-point error while summing.
 * Parenthesized numbers in field comments are ASSERTION_TYPES IDs.
 */
export interface StakerState {
  totalShares: bigint; // Current shares balance
  totalPrincipalIn: bigint; // Sum of stake principals
  totalAmountOut: bigint; // Sum of unstake amounts
  yieldClaimed: bigint; // Yield distributed as pretokens (from 3006)
}

/**
 * Running per-purchaser accumulator used by the balance parser.
 * Parenthesized numbers in field comments are ASSERTION_TYPES IDs.
 */
export interface PurchaserState {
  pretokens: bigint; // Direct pretoken purchases (3005)
  liqShares: bigint; // Yield-bearing shares (3004)
  liqPrincipalIn: bigint; // Principal for liq positions (3004)
  yieldClaimed: bigint; // Yield converted to pretokens (3006)
}
|
||||
|
||||
// Output Types
|
||||
|
||||
/**
 * Final per-staker balances for the generated report.
 * Amounts are decimal strings so the output is JSON-serializable.
 */
export interface StakerOutput {
  principal: string; // Net principal (in - out)
  shares: string; // Raw shares for verification
  pretokenYield: string; // Yield distributed as pretokens (from 3006)
}

/**
 * Final per-purchaser balances for the generated report
 * (string-serialized amounts; IDs refer to ASSERTION_TYPES).
 */
export interface PurchaserOutput {
  pretokens: string; // Direct pretoken purchases (3005)
  shares: string; // Liq position shares (3004)
  principal: string; // ETH contributed to liq (3004)
  yield: string; // Yield allocated as pretokens (3006)
  totalPretokens: string; // pretokens + yield
}

/** Provenance and summary info attached to a generated balance report. */
export interface BalanceMetadata {
  lastMessageId: string; // messageID of the last message folded in
  lastTimestamp: string;
  latestIndex: string;
  totalMessages: number;
  generatedAt: string; // report generation time
  yieldDust: string; // NOTE(review): presumably a rounding remainder from yield math — confirm in parser
  totalLiqEthStaked: string;
  totalLiqEthPurchased: string;
}

/** Top-level shape of the generated balances file, keyed by string id. */
export interface BalanceOutput {
  stakers: Record<string, StakerOutput>;
  purchasers: Record<string, PurchaserOutput>;
  metadata: BalanceMetadata;
}
|
||||
|
||||
// Assertion type constants
|
||||
export const ASSERTION_TYPES = {
|
||||
BONDED_ACTOR: 2001,
|
||||
UNBONDED_ACTOR: 2002,
|
||||
STAKE: 3001,
|
||||
UNSTAKE: 3002,
|
||||
LIQ_PRETOKEN_PURCHASE: 3004,
|
||||
PRETOKEN_PURCHASE: 3005,
|
||||
YIELD_PRETOKEN_PURCHASE: 3006,
|
||||
} as const;
|
||||
19
tsconfig.json
Normal file
19
tsconfig.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2020",
|
||||
"module": "commonjs",
|
||||
"lib": ["ES2020"],
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"resolveJsonModule": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"sourceMap": true
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
Reference in New Issue
Block a user