diff --git a/miner-app/lib/features/miner/miner_app_bar.dart b/miner-app/lib/features/miner/miner_app_bar.dart index 0d817bfc..7b8f3ca0 100644 --- a/miner-app/lib/features/miner/miner_app_bar.dart +++ b/miner-app/lib/features/miner/miner_app_bar.dart @@ -53,7 +53,9 @@ class _MinerAppBarState extends State { } void _goToSettingScreen() { - Navigator.of(context).push(MaterialPageRoute(builder: (context) => const SettingsScreen())); + Navigator.of( + context, + ).push(MaterialPageRoute(builder: (context) => const SettingsScreen())); } @override @@ -64,17 +66,31 @@ class _MinerAppBarState extends State { floating: true, pinned: false, flexibleSpace: ClipRRect( - borderRadius: const BorderRadius.only(bottomLeft: Radius.circular(24), bottomRight: Radius.circular(24)), + borderRadius: const BorderRadius.only( + bottomLeft: Radius.circular(24), + bottomRight: Radius.circular(24), + ), child: BackdropFilter( - filter: ColorFilter.mode(Colors.black.useOpacity(0.1), BlendMode.srcOver), + filter: ColorFilter.mode( + Colors.black.useOpacity(0.1), + BlendMode.srcOver, + ), child: Container( decoration: BoxDecoration( gradient: LinearGradient( begin: Alignment.topCenter, end: Alignment.bottomCenter, - colors: [Colors.white.useOpacity(0.1), Colors.white.useOpacity(0.05)], + colors: [ + Colors.white.useOpacity(0.1), + Colors.white.useOpacity(0.05), + ], + ), + border: Border( + bottom: BorderSide( + color: Colors.white.useOpacity(0.1), + width: 1, + ), ), - border: Border(bottom: BorderSide(color: Colors.white.useOpacity(0.1), width: 1)), ), child: Padding( padding: const EdgeInsets.symmetric(horizontal: 20, vertical: 12), @@ -100,11 +116,16 @@ class _MinerAppBarState extends State { decoration: BoxDecoration( borderRadius: BorderRadius.circular(12), color: Colors.white.useOpacity(0.1), - border: Border.all(color: Colors.white.useOpacity(0.2), width: 1), + border: Border.all( + color: Colors.white.useOpacity(0.2), + width: 1, + ), ), child: PopupMenuButton<_MenuValues>( color: 
const Color(0xFF1A1A1A), - shape: RoundedRectangleBorder(borderRadius: BorderRadius.circular(16)), + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(16), + ), onSelected: (_MenuValues item) async { switch (item) { case _MenuValues.logout: @@ -115,35 +136,57 @@ class _MinerAppBarState extends State { break; } }, - itemBuilder: (BuildContext context) => >[ - PopupMenuItem<_MenuValues>( - value: _MenuValues.logout, - child: Row( - children: [ - Icon(Icons.logout, color: Colors.red.useOpacity(0.8), size: 20), - const SizedBox(width: 12), - Text( - 'Logout (Full Reset)', - style: TextStyle(color: Colors.white.useOpacity(0.9), fontSize: 14), + itemBuilder: (BuildContext context) => + >[ + PopupMenuItem<_MenuValues>( + value: _MenuValues.logout, + child: Row( + children: [ + Icon( + Icons.logout, + color: Colors.red.useOpacity(0.8), + size: 20, + ), + const SizedBox(width: 12), + Text( + 'Logout (Full Reset)', + style: TextStyle( + color: Colors.white.useOpacity(0.9), + fontSize: 14, + ), + ), + ], ), - ], - ), - ), - PopupMenuItem<_MenuValues>( - value: _MenuValues.setting, - child: Row( - children: [ - Icon(Icons.settings, color: Colors.grey.useOpacity(0.8), size: 20), - const SizedBox(width: 12), - Text('Settings', style: TextStyle(color: Colors.white.useOpacity(0.9), fontSize: 14)), - ], - ), - ), - ], + ), + PopupMenuItem<_MenuValues>( + value: _MenuValues.setting, + child: Row( + children: [ + Icon( + Icons.settings, + color: Colors.grey.useOpacity(0.8), + size: 20, + ), + const SizedBox(width: 12), + Text( + 'Settings', + style: TextStyle( + color: Colors.white.useOpacity(0.9), + fontSize: 14, + ), + ), + ], + ), + ), + ], child: Center( child: Padding( padding: const EdgeInsets.symmetric(horizontal: 8.0), - child: Icon(Icons.menu, color: Colors.white.useOpacity(0.7), size: 20), + child: Icon( + Icons.menu, + color: Colors.white.useOpacity(0.7), + size: 20, + ), ), ), ), diff --git a/miner-app/lib/features/miner/miner_balance_card.dart 
b/miner-app/lib/features/miner/miner_balance_card.dart index f152360b..27d90ddd 100644 --- a/miner-app/lib/features/miner/miner_balance_card.dart +++ b/miner-app/lib/features/miner/miner_balance_card.dart @@ -1,16 +1,14 @@ import 'dart:async'; -import 'dart:io'; import 'package:flutter/material.dart'; -import 'package:polkadart/polkadart.dart'; -import 'package:quantus_miner/src/config/miner_config.dart'; -import 'package:quantus_miner/src/services/binary_manager.dart'; import 'package:quantus_miner/src/services/miner_settings_service.dart'; +import 'package:quantus_miner/src/services/miner_wallet_service.dart'; +import 'package:quantus_miner/src/services/transfer_tracking_service.dart'; +import 'package:quantus_miner/src/services/wormhole_address_manager.dart'; import 'package:quantus_miner/src/shared/extensions/snackbar_extensions.dart'; -import 'package:quantus_miner/src/shared/miner_app_constants.dart'; import 'package:quantus_miner/src/utils/app_logger.dart'; -import 'package:quantus_sdk/quantus_sdk.dart'; -import 'package:quantus_sdk/generated/schrodinger/schrodinger.dart'; +import 'package:quantus_sdk/quantus_sdk.dart' + hide WormholeAddressManager, TrackedWormholeAddress, WormholeAddressPurpose; final _log = log.withTag('BalanceCard'); @@ -18,28 +16,47 @@ class MinerBalanceCard extends StatefulWidget { /// Current block number - when this changes, balance is refreshed final int currentBlock; - const MinerBalanceCard({super.key, this.currentBlock = 0}); + /// Callback when withdraw button is pressed + final void Function(BigInt balance, String address, String secretHex)? + onWithdraw; + + /// Increment this to force a balance refresh (e.g., after withdrawal) + final int refreshKey; + + const MinerBalanceCard({ + super.key, + this.currentBlock = 0, + this.onWithdraw, + this.refreshKey = 0, + }); @override State createState() => _MinerBalanceCardState(); } class _MinerBalanceCardState extends State { - String _walletBalance = 'Loading...'; - String? 
_walletAddress; - String _chainId = MinerConfig.defaultChainId; + final _walletService = MinerWalletService(); + final _addressManager = WormholeAddressManager(); + final _transferTrackingService = TransferTrackingService(); + + String _rewardsBalance = 'Loading...'; + String? _wormholeAddress; + String? _secretHex; + BigInt _balancePlanck = BigInt.zero; + int _unspentTransferCount = 0; + bool _canTrackBalance = false; + bool _canWithdraw = false; + bool _isLoading = true; Timer? _balanceTimer; - final _settingsService = MinerSettingsService(); int _lastRefreshedBlock = 0; @override void initState() { super.initState(); - - _loadChainAndFetchBalance(); - // Start automatic polling as backup - _balanceTimer = Timer.periodic(MinerConfig.balancePollingInterval, (_) { - _loadChainAndFetchBalance(); + _loadWalletAndBalance(); + // Poll every 30 seconds for balance updates + _balanceTimer = Timer.periodic(const Duration(seconds: 30), (_) { + _fetchBalance(); }); } @@ -49,7 +66,11 @@ class _MinerBalanceCardState extends State { // Refresh balance when block number increases (new block found) if (widget.currentBlock > _lastRefreshedBlock && widget.currentBlock > 0) { _lastRefreshedBlock = widget.currentBlock; - _loadChainAndFetchBalance(); + _fetchBalance(); + } + // Refresh balance when refreshKey changes (e.g., after withdrawal) + if (widget.refreshKey != oldWidget.refreshKey) { + _fetchBalance(); } } @@ -59,110 +80,203 @@ class _MinerBalanceCardState extends State { super.dispose(); } - Future _loadChainAndFetchBalance() async { - final chainId = await _settingsService.getChainId(); - if (mounted) { - setState(() => _chainId = chainId); - } - await _fetchWalletBalance(); - } + Future _loadWalletAndBalance() async { + setState(() => _isLoading = true); - Future _fetchWalletBalance() async { - _log.d('Fetching wallet balance for chain: $_chainId'); try { - final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); - final rewardsFile = 
File('$quantusHome/rewards-address.txt'); - - if (await rewardsFile.exists()) { - final address = (await rewardsFile.readAsString()).trim(); - - if (address.isNotEmpty) { - final chainConfig = MinerConfig.getChainById(_chainId); - _log.d('Chain: ${chainConfig.id}, rpcUrl: ${chainConfig.rpcUrl}, isLocal: ${chainConfig.isLocalNode}'); - BigInt balance; - - if (chainConfig.isLocalNode) { - // Use local node RPC for dev chain - _log.d('Querying balance from local node: ${chainConfig.rpcUrl}'); - balance = await _queryBalanceFromLocalNode(address, chainConfig.rpcUrl); - } else { - // Use SDK's SubstrateService for remote chains (dirac) - _log.d('Querying balance from remote (SDK SubstrateService)'); - balance = await SubstrateService().queryBalance(address); - } - - _log.d('Balance: $balance'); - - if (mounted) { - setState(() { - _walletBalance = NumberFormattingService().formatBalance(balance, addSymbol: true); - _walletAddress = address; - }); - } + // Ensure RPC endpoint is configured for the current chain + final settingsService = MinerSettingsService(); + final chainId = await settingsService.getChainId(); + _log.i('Loading balance with chain: $chainId'); + + // Check if we have a mnemonic (can derive secret for balance tracking) + final canWithdraw = await _walletService.canWithdraw(); + _canTrackBalance = canWithdraw; + + if (canWithdraw) { + // We have the mnemonic - get the full key pair + final keyPair = await _walletService.getWormholeKeyPair(); + if (keyPair != null) { + _wormholeAddress = keyPair.address; + _secretHex = keyPair.secretHex; + _canWithdraw = true; + await _fetchBalanceWithSecret(keyPair.address, keyPair.secretHex); } else { - _handleAddressNotSet(); + _handleNotSetup(); } } else { - _handleAddressNotSet(); + // Only preimage - we can show the address but not track balance + final preimage = await _walletService.readRewardsPreimageFile(); + if (preimage != null) { + // We have a preimage but can't derive the address without the secret + 
setState(() { + _wormholeAddress = null; + _rewardsBalance = 'Import wallet to track'; + _isLoading = false; + }); + } else { + _handleNotSetup(); + } } } catch (e) { - if (mounted) { - setState(() { - // Show helpful message for dev chain when node not running - if (_chainId == 'dev') { - _walletBalance = 'Start node to view'; - } else { - _walletBalance = 'Error'; - } - }); + _log.e('Error loading wallet', error: e); + setState(() { + _rewardsBalance = 'Error'; + _isLoading = false; + }); + } + } + + Future _fetchBalance() async { + if (!_canTrackBalance) return; + + try { + final keyPair = await _walletService.getWormholeKeyPair(); + if (keyPair != null) { + await _fetchBalanceWithSecret(keyPair.address, keyPair.secretHex); } - _log.w('Error fetching wallet balance', error: e); + } catch (e) { + _log.w('Error fetching balance', error: e); } } - /// Query balance directly from local node using Polkadart - Future _queryBalanceFromLocalNode(String address, String rpcUrl) async { + Future _fetchBalanceWithSecret(String address, String secretHex) async { try { - final provider = Provider.fromUri(Uri.parse(rpcUrl)); - final quantusApi = Schrodinger(provider); + // Initialize address manager and transfer tracking + await _addressManager.initialize(); - // Convert SS58 address to account ID using the SDK's crypto - final accountId = ss58ToAccountId(s: address); + // Get chain config for RPC URL + final settingsService = MinerSettingsService(); + final chainConfig = await settingsService.getChainConfig(); - final accountInfo = await quantusApi.query.system.account(accountId); - return accountInfo.data.free; - } catch (e) { - _log.d('Error querying local node balance: $e'); - // Return zero if node is not running or address has no balance - return BigInt.zero; + // Initialize transfer tracking with all known addresses + final allAddresses = _addressManager.allAddressStrings; + if (allAddresses.isEmpty) { + _transferTrackingService.initialize( + rpcUrl: chainConfig.rpcUrl, 
+ wormholeAddresses: {address}, + ); + } else { + _transferTrackingService.initialize( + rpcUrl: chainConfig.rpcUrl, + wormholeAddresses: allAddresses, + ); + } + await _transferTrackingService.loadFromDisk(); + + _log.i('=== BALANCE QUERY DEBUG ==='); + _log.i('Primary address (SS58): $address'); + _log.i('Total tracked addresses: ${_addressManager.allAddresses.length}'); + _log.i('==========================='); + + // Get unspent transfers for all tracked addresses + var totalBalance = BigInt.zero; + var totalUnspentCount = 0; + + // Check primary address + final primaryUnspent = await _transferTrackingService.getUnspentTransfers( + wormholeAddress: address, + secretHex: secretHex, + ); + for (final transfer in primaryUnspent) { + totalBalance += transfer.amount; + totalUnspentCount++; + } + _log.i( + 'Primary address: ${primaryUnspent.length} unspent, ${_formatBalance(totalBalance)}', + ); + + // Check other tracked addresses (change addresses) + for (final tracked in _addressManager.allAddresses) { + if (tracked.address == address) + continue; // Skip primary, already counted + + final unspent = await _transferTrackingService.getUnspentTransfers( + wormholeAddress: tracked.address, + secretHex: tracked.secretHex, + ); + for (final transfer in unspent) { + totalBalance += transfer.amount; + totalUnspentCount++; + } + if (unspent.isNotEmpty) { + final addrBalance = unspent.fold( + BigInt.zero, + (sum, t) => sum + t.amount, + ); + _log.i( + 'Change address ${tracked.address}: ${unspent.length} unspent, ${_formatBalance(addrBalance)}', + ); + } + } + + _log.i( + 'Total withdrawable: $totalUnspentCount UTXOs, ${_formatBalance(totalBalance)}', + ); + + if (mounted) { + setState(() { + _rewardsBalance = NumberFormattingService().formatBalance( + totalBalance, + addSymbol: true, + ); + _wormholeAddress = address; + _secretHex = secretHex; + _balancePlanck = totalBalance; + _unspentTransferCount = totalUnspentCount; + _isLoading = false; + }); + } + } catch (e, st) { + 
_log.e('Error fetching balance', error: e, stackTrace: st); + if (mounted) { + setState(() { + _rewardsBalance = 'Unable to connect'; + _isLoading = false; + }); + } } } - void _handleAddressNotSet() { + String _formatBalance(BigInt planck) { + return NumberFormattingService().formatBalance(planck, addSymbol: true); + } + + void _handleNotSetup() { if (mounted) { setState(() { - _walletBalance = 'Address not set'; - _walletAddress = null; + _rewardsBalance = 'Not configured'; + _wormholeAddress = null; + _isLoading = false; }); } - _log.w('Rewards address file not found or empty'); } @override Widget build(BuildContext context) { return Container( margin: const EdgeInsets.only(bottom: 20), - height: MinerAppConstants.cardHeight, decoration: BoxDecoration( gradient: LinearGradient( begin: Alignment.topLeft, end: Alignment.bottomRight, - colors: [Colors.white.useOpacity(0.1), Colors.white.useOpacity(0.05)], + colors: [ + Colors.white.withValues(alpha: 0.1), + Colors.white.withValues(alpha: 0.05), + ], ), borderRadius: BorderRadius.circular(24), - border: Border.all(color: Colors.white.useOpacity(0.1), width: 1), + border: Border.all( + color: Colors.white.withValues(alpha: 0.1), + width: 1, + ), boxShadow: [ - BoxShadow(color: Colors.black.useOpacity(0.2), blurRadius: 20, spreadRadius: 1, offset: const Offset(0, 8)), + BoxShadow( + color: Colors.black.withValues(alpha: 0.2), + blurRadius: 20, + spreadRadius: 1, + offset: const Offset(0, 8), + ), ], ), child: Padding( @@ -176,61 +290,85 @@ class _MinerBalanceCardState extends State { padding: const EdgeInsets.all(8), decoration: BoxDecoration( gradient: const LinearGradient( - colors: [ - Color(0xFF6366F1), // Deep purple - Color(0xFF1E3A8A), // Deep blue - ], + colors: [Color(0xFF10B981), Color(0xFF059669)], ), borderRadius: BorderRadius.circular(12), ), - child: const Icon(Icons.account_balance_wallet, color: Colors.white, size: 20), + child: const Icon( + Icons.savings, + color: Colors.white, + size: 20, + ), ), 
const SizedBox(width: 12), Text( - 'Wallet Balance', - style: TextStyle(fontSize: 18, fontWeight: FontWeight.w600, color: Colors.white.useOpacity(0.9)), + 'Mining Rewards', + style: TextStyle( + fontSize: 18, + fontWeight: FontWeight.w600, + color: Colors.white.withValues(alpha: 0.9), + ), ), ], ), const SizedBox(height: 20), - Text( - _walletBalance, - style: const TextStyle( - fontSize: 32, - fontWeight: FontWeight.w700, - color: Color(0xFF6366F1), // Deep purple - letterSpacing: -1, + if (_isLoading) + const SizedBox( + height: 32, + width: 32, + child: CircularProgressIndicator(strokeWidth: 2), + ) + else + Text( + _rewardsBalance, + style: const TextStyle( + fontSize: 32, + fontWeight: FontWeight.w700, + color: Color(0xFF10B981), + letterSpacing: -1, + ), ), - ), - if (_walletAddress != null) ...[ + if (_wormholeAddress != null) ...[ const SizedBox(height: 12), Container( padding: const EdgeInsets.all(12), decoration: BoxDecoration( - color: Colors.white.useOpacity(0.05), + color: Colors.white.withValues(alpha: 0.05), borderRadius: BorderRadius.circular(12), - border: Border.all(color: Colors.white.useOpacity(0.1), width: 1), + border: Border.all( + color: Colors.white.withValues(alpha: 0.1), + width: 1, + ), ), child: Row( children: [ - Icon(Icons.link, color: Colors.white.useOpacity(0.5), size: 16), + Icon( + Icons.link, + color: Colors.white.withValues(alpha: 0.5), + size: 16, + ), const SizedBox(width: 8), Expanded( child: Text( - _walletAddress!, + _wormholeAddress!, style: TextStyle( fontSize: 12, - color: Colors.white.useOpacity(0.6), + color: Colors.white.withValues(alpha: 0.6), fontFamily: 'Fira Code', letterSpacing: 0.5, ), + overflow: TextOverflow.ellipsis, ), ), IconButton( - icon: Icon(Icons.copy, color: Colors.white.useOpacity(0.5), size: 16), + icon: Icon( + Icons.copy, + color: Colors.white.withValues(alpha: 0.5), + size: 16, + ), onPressed: () { - if (_walletAddress != null) { - context.copyTextWithSnackbar(_walletAddress!); + if 
(_wormholeAddress != null) { + context.copyTextWithSnackbar(_wormholeAddress!); } }, constraints: const BoxConstraints(), @@ -240,6 +378,71 @@ class _MinerBalanceCardState extends State { ), ), ], + if (!_canTrackBalance && !_isLoading) ...[ + const SizedBox(height: 12), + Container( + padding: const EdgeInsets.all(12), + decoration: BoxDecoration( + color: Colors.amber.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(12), + border: Border.all( + color: Colors.amber.withValues(alpha: 0.2), + width: 1, + ), + ), + child: Row( + children: [ + Icon( + Icons.info_outline, + color: Colors.amber.shade300, + size: 16, + ), + const SizedBox(width: 8), + Expanded( + child: Text( + 'Import your full wallet to track balance and withdraw rewards.', + style: TextStyle( + fontSize: 12, + color: Colors.amber.shade200, + ), + ), + ), + ], + ), + ), + ], + // Withdraw button + if (_canWithdraw && + _balancePlanck > BigInt.zero && + !_isLoading) ...[ + const SizedBox(height: 16), + SizedBox( + width: double.infinity, + child: ElevatedButton.icon( + onPressed: () { + if (widget.onWithdraw != null && + _wormholeAddress != null && + _secretHex != null) { + widget.onWithdraw!( + _balancePlanck, + _wormholeAddress!, + _secretHex!, + ); + } + }, + icon: const Icon(Icons.output, size: 18), + label: const Text('Withdraw Rewards'), + style: ElevatedButton.styleFrom( + backgroundColor: const Color(0xFF10B981), + foregroundColor: Colors.white, + padding: const EdgeInsets.symmetric(vertical: 12), + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(12), + ), + ), + ), + ), + ], ], ), ), diff --git a/miner-app/lib/features/miner/miner_controls.dart b/miner-app/lib/features/miner/miner_controls.dart index 49397997..f11c5737 100644 --- a/miner-app/lib/features/miner/miner_controls.dart +++ b/miner-app/lib/features/miner/miner_controls.dart @@ -3,10 +3,12 @@ import 'dart:io'; import 'package:flutter/material.dart'; import 
'package:quantus_miner/src/config/miner_config.dart'; +import 'package:quantus_miner/src/services/miner_wallet_service.dart'; import 'package:quantus_miner/src/services/mining_orchestrator.dart'; import 'package:quantus_miner/src/services/mining_stats_service.dart'; import 'package:quantus_miner/src/shared/extensions/snackbar_extensions.dart'; import 'package:quantus_miner/src/utils/app_logger.dart'; +import 'package:quantus_sdk/quantus_sdk.dart'; import '../../main.dart'; import '../../src/services/binary_manager.dart'; @@ -54,7 +56,9 @@ class _MinerControlsState extends State { if (mounted) { setState(() { - _cpuWorkers = savedCpuWorkers ?? (Platform.numberOfProcessors > 0 ? Platform.numberOfProcessors : 8); + _cpuWorkers = + savedCpuWorkers ?? + (Platform.numberOfProcessors > 0 ? Platform.numberOfProcessors : 8); _gpuDevices = savedGpuDevices ?? 0; _chainId = savedChainId; }); @@ -98,10 +102,23 @@ class _MinerControlsState extends State { setState(() => _chainId = chainId); } + // Get rewards preimage directly from the wallet (not from file) + final walletService = MinerWalletService(); + final wormholeKeyPair = await walletService.getWormholeKeyPair(); + if (wormholeKeyPair == null) { + _log.w('No wormhole keypair - wallet not set up'); + if (mounted) { + context.showWarningSnackbar( + title: 'Wallet not configured!', + message: 'Please set up your rewards address first.', + ); + } + return; + } + // Check for required files final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); final identityFile = File('$quantusHome/node_key.p2p'); - final rewardsFile = File('$quantusHome/rewards-address.txt'); final nodeBinPath = await BinaryManager.getNodeBinaryFilePath(); final nodeBin = File(nodeBinPath); final minerBinPath = await BinaryManager.getExternalMinerBinaryFilePath(); @@ -110,11 +127,33 @@ class _MinerControlsState extends State { if (!await nodeBin.exists()) { _log.w('Node binary not found'); if (mounted) { - context.showWarningSnackbar(title: 
'Node binary not found!', message: 'Please run setup.'); + context.showWarningSnackbar( + title: 'Node binary not found!', + message: 'Please run setup.', + ); } return; } + // Log comprehensive wormhole derivation info for debugging + _log.i('=== WORMHOLE DERIVATION DEBUG ==='); + _log.i('Preimage (SS58): ${wormholeKeyPair.rewardsPreimage}'); + _log.i('Preimage (hex): ${wormholeKeyPair.rewardsPreimageHex}'); + _log.i('Address (SS58): ${wormholeKeyPair.address}'); + _log.i('Address (hex): ${wormholeKeyPair.addressHex}'); + _log.i( + 'Secret (hex): ${wormholeKeyPair.secretHex.substring(0, 10)}...[redacted]', + ); + + // Verify: compute address from preimage hex and check it matches + final wormholeService = WormholeService(); + final verifiedAddress = wormholeService.preimageToAddress( + wormholeKeyPair.rewardsPreimageHex, + ); + _log.i('Verified addr: $verifiedAddress'); + _log.i('Addresses match: ${verifiedAddress == wormholeKeyPair.address}'); + _log.i('================================='); + // Create new orchestrator final orchestrator = MiningOrchestrator(); widget.onOrchestratorChanged(orchestrator); @@ -125,7 +164,8 @@ class _MinerControlsState extends State { nodeBinary: nodeBin, minerBinary: minerBin, identityFile: identityFile, - rewardsFile: rewardsFile, + rewardsPreimage: wormholeKeyPair.rewardsPreimage, + wormholeAddress: wormholeKeyPair.address, chainId: _chainId, cpuWorkers: _cpuWorkers, gpuDevices: _gpuDevices, @@ -135,7 +175,10 @@ class _MinerControlsState extends State { } catch (e) { _log.e('Error starting node', error: e); if (mounted) { - context.showErrorSnackbar(title: 'Error starting node!', message: e.toString()); + context.showErrorSnackbar( + title: 'Error starting node!', + message: e.toString(), + ); } orchestrator.dispose(); widget.onOrchestratorChanged(null); @@ -182,7 +225,10 @@ class _MinerControlsState extends State { if (widget.orchestrator == null) { if (mounted) { - context.showWarningSnackbar(title: 'Node not running!', message: 
'Start the node first.'); + context.showWarningSnackbar( + title: 'Node not running!', + message: 'Start the node first.', + ); } return; } @@ -194,20 +240,29 @@ class _MinerControlsState extends State { if (!await minerBin.exists()) { _log.w('Miner binary not found'); if (mounted) { - context.showWarningSnackbar(title: 'Miner binary not found!', message: 'Please run setup.'); + context.showWarningSnackbar( + title: 'Miner binary not found!', + message: 'Please run setup.', + ); } return; } try { // Update settings in case they changed while miner was stopped - widget.orchestrator!.updateMinerSettings(cpuWorkers: _cpuWorkers, gpuDevices: _gpuDevices); + widget.orchestrator!.updateMinerSettings( + cpuWorkers: _cpuWorkers, + gpuDevices: _gpuDevices, + ); await widget.orchestrator!.startMiner(); } catch (e) { _log.e('Error starting miner', error: e); if (mounted) { - context.showErrorSnackbar(title: 'Error starting miner!', message: e.toString()); + context.showErrorSnackbar( + title: 'Error starting miner!', + message: e.toString(), + ); } } } @@ -234,7 +289,9 @@ class _MinerControlsState extends State { /// Whether miner is starting or running (for disabling settings) bool get _isMinerActive { final state = widget.orchestrator?.state; - return state == MiningState.startingMiner || state == MiningState.mining || state == MiningState.stoppingMiner; + return state == MiningState.startingMiner || + state == MiningState.mining || + state == MiningState.stoppingMiner; } String get _nodeButtonText { @@ -281,15 +338,24 @@ class _MinerControlsState extends State { Row( mainAxisAlignment: MainAxisAlignment.spaceBetween, children: [ - const Text('CPU Workers', style: TextStyle(fontWeight: FontWeight.bold)), + const Text( + 'CPU Workers', + style: TextStyle(fontWeight: FontWeight.bold), + ), Text('$_cpuWorkers'), ], ), Slider( value: _cpuWorkers.toDouble(), min: 0, - max: (Platform.numberOfProcessors > 0 ? 
Platform.numberOfProcessors : 16).toDouble(), - divisions: (Platform.numberOfProcessors > 0 ? Platform.numberOfProcessors : 16), + max: + (Platform.numberOfProcessors > 0 + ? Platform.numberOfProcessors + : 16) + .toDouble(), + divisions: (Platform.numberOfProcessors > 0 + ? Platform.numberOfProcessors + : 16), label: _cpuWorkers.toString(), onChanged: canEditSettings ? (value) { @@ -313,7 +379,10 @@ class _MinerControlsState extends State { Row( mainAxisAlignment: MainAxisAlignment.spaceBetween, children: [ - const Text('GPU Devices', style: TextStyle(fontWeight: FontWeight.bold)), + const Text( + 'GPU Devices', + style: TextStyle(fontWeight: FontWeight.bold), + ), Text('$_gpuDevices / $_detectedGpuCount'), ], ), @@ -344,8 +413,14 @@ class _MinerControlsState extends State { ElevatedButton( style: ElevatedButton.styleFrom( backgroundColor: _nodeButtonColor, - padding: const EdgeInsets.symmetric(vertical: 15, horizontal: 20), - textStyle: const TextStyle(fontSize: 16, fontWeight: FontWeight.bold), + padding: const EdgeInsets.symmetric( + vertical: 15, + horizontal: 20, + ), + textStyle: const TextStyle( + fontSize: 16, + fontWeight: FontWeight.bold, + ), minimumSize: const Size(140, 50), ), onPressed: _isNodeToggling ? null : _toggleNode, @@ -357,11 +432,19 @@ class _MinerControlsState extends State { ElevatedButton( style: ElevatedButton.styleFrom( backgroundColor: _minerButtonColor, - padding: const EdgeInsets.symmetric(vertical: 15, horizontal: 20), - textStyle: const TextStyle(fontSize: 16, fontWeight: FontWeight.bold), + padding: const EdgeInsets.symmetric( + vertical: 15, + horizontal: 20, + ), + textStyle: const TextStyle( + fontSize: 16, + fontWeight: FontWeight.bold, + ), minimumSize: const Size(140, 50), ), - onPressed: (_isMinerToggling || !_isNodeRunning) ? null : _toggleMiner, + onPressed: (_isMinerToggling || !_isNodeRunning) + ? 
null + : _toggleMiner, child: Text(_minerButtonText), ), ], @@ -370,7 +453,10 @@ class _MinerControlsState extends State { // Status indicator if (_isNodeRunning && !_isMining) ...[ const SizedBox(height: 12), - Text('Node running - ready to mine', style: TextStyle(color: Colors.green.shade300, fontSize: 12)), + Text( + 'Node running - ready to mine', + style: TextStyle(color: Colors.green.shade300, fontSize: 12), + ), ], ], ); diff --git a/miner-app/lib/features/miner/miner_dashboard_screen.dart b/miner-app/lib/features/miner/miner_dashboard_screen.dart index 0eb05041..f933e657 100644 --- a/miner-app/lib/features/miner/miner_dashboard_screen.dart +++ b/miner-app/lib/features/miner/miner_dashboard_screen.dart @@ -1,6 +1,7 @@ import 'dart:async'; import 'package:flutter/material.dart'; +import 'package:go_router/go_router.dart'; import 'package:quantus_miner/features/miner/miner_balance_card.dart'; import 'package:quantus_miner/features/miner/miner_app_bar.dart'; import 'package:quantus_miner/features/miner/miner_stats_card.dart'; @@ -36,6 +37,9 @@ class _MinerDashboardScreenState extends State { MiningStats _miningStats = MiningStats.empty(); + // Key to force balance card refresh (incremented after withdrawal) + int _balanceRefreshKey = 0; + // The orchestrator manages all mining operations MiningOrchestrator? 
_orchestrator; @@ -113,7 +117,10 @@ class _MinerDashboardScreenState extends State { if (!mounted) return; // Show error to user - context.showErrorSnackbar(title: _getErrorTitle(error), message: error.message); + context.showErrorSnackbar( + title: _getErrorTitle(error), + message: error.message, + ); } String _getErrorTitle(MinerError error) { @@ -182,7 +189,8 @@ class _MinerDashboardScreenState extends State { onProgress: (progress) { setState(() { if (progress.totalBytes > 0) { - _minerUpdateProgress = progress.downloadedBytes / progress.totalBytes; + _minerUpdateProgress = + progress.downloadedBytes / progress.totalBytes; } else { _minerUpdateProgress = progress.downloadedBytes > 0 ? 1.0 : 0.0; } @@ -244,7 +252,8 @@ class _MinerDashboardScreenState extends State { onProgress: (progress) { setState(() { if (progress.totalBytes > 0) { - _nodeUpdateProgress = progress.downloadedBytes / progress.totalBytes; + _nodeUpdateProgress = + progress.downloadedBytes / progress.totalBytes; } else { _nodeUpdateProgress = progress.downloadedBytes > 0 ? 
1.0 : 0.0; } @@ -335,13 +344,20 @@ class _MinerDashboardScreenState extends State { // Logs section SliverToBoxAdapter( child: Padding( - padding: const EdgeInsets.only(left: 20, right: 20, bottom: 20), + padding: const EdgeInsets.only( + left: 20, + right: 20, + bottom: 20, + ), child: Container( height: 430, decoration: BoxDecoration( color: Colors.white.useOpacity(0.05), borderRadius: BorderRadius.circular(20), - border: Border.all(color: Colors.white.useOpacity(0.1), width: 1), + border: Border.all( + color: Colors.white.useOpacity(0.1), + width: 1, + ), ), child: Column( children: [ @@ -349,11 +365,20 @@ class _MinerDashboardScreenState extends State { Container( padding: const EdgeInsets.all(16), decoration: BoxDecoration( - border: Border(bottom: BorderSide(color: Colors.white.useOpacity(0.1), width: 1)), + border: Border( + bottom: BorderSide( + color: Colors.white.useOpacity(0.1), + width: 1, + ), + ), ), child: Row( children: [ - Icon(Icons.terminal, color: Colors.white.useOpacity(0.7), size: 20), + Icon( + Icons.terminal, + color: Colors.white.useOpacity(0.7), + size: 20, + ), const SizedBox(width: 12), Text( 'Live Logs', @@ -367,7 +392,12 @@ class _MinerDashboardScreenState extends State { ), ), // Logs content - Expanded(child: LogsWidget(orchestrator: _orchestrator, maxLines: 200)), + Expanded( + child: LogsWidget( + orchestrator: _orchestrator, + maxLines: 200, + ), + ), ], ), ), @@ -381,13 +411,39 @@ class _MinerDashboardScreenState extends State { ); } + void _onWithdraw(BigInt balance, String address, String secretHex) { + context + .push( + '/withdraw', + extra: { + 'balance': balance, + 'address': address, + 'secretHex': secretHex, + }, + ) + .then((_) { + // Refresh balance when returning from withdrawal screen + if (mounted) { + setState(() { + _balanceRefreshKey++; + }); + } + }); + } + Widget _buildResponsiveCards() { return LayoutBuilder( builder: (context, constraints) { if (constraints.maxWidth > 800) { return Row( children: [ - 
Expanded(child: MinerBalanceCard(currentBlock: _miningStats.currentBlock)), + Expanded( + child: MinerBalanceCard( + currentBlock: _miningStats.currentBlock, + onWithdraw: _onWithdraw, + refreshKey: _balanceRefreshKey, + ), + ), const SizedBox(width: 16), Expanded(child: MinerStatsCard(miningStats: _miningStats)), ], @@ -395,7 +451,11 @@ class _MinerDashboardScreenState extends State { } else { return Column( children: [ - MinerBalanceCard(currentBlock: _miningStats.currentBlock), + MinerBalanceCard( + currentBlock: _miningStats.currentBlock, + onWithdraw: _onWithdraw, + refreshKey: _balanceRefreshKey, + ), MinerStatsCard(miningStats: _miningStats), ], ); diff --git a/miner-app/lib/features/miner/miner_stats_card.dart b/miner-app/lib/features/miner/miner_stats_card.dart index d9c9c3c3..b74d3a82 100644 --- a/miner-app/lib/features/miner/miner_stats_card.dart +++ b/miner-app/lib/features/miner/miner_stats_card.dart @@ -29,7 +29,10 @@ class _MinerStatsCardState extends State { return Container( padding: const EdgeInsets.all(40), margin: const EdgeInsets.only(bottom: 20), - decoration: BoxDecoration(color: Colors.white.useOpacity(0.05), borderRadius: BorderRadius.circular(20)), + decoration: BoxDecoration( + color: Colors.white.useOpacity(0.05), + borderRadius: BorderRadius.circular(20), + ), child: Row( mainAxisAlignment: MainAxisAlignment.center, children: [ @@ -38,11 +41,16 @@ class _MinerStatsCardState extends State { height: 20, child: CircularProgressIndicator( strokeWidth: 2, - valueColor: AlwaysStoppedAnimation(Colors.white.useOpacity(0.6)), + valueColor: AlwaysStoppedAnimation( + Colors.white.useOpacity(0.6), + ), ), ), const SizedBox(width: 16), - Text('Loading mining stats...', style: TextStyle(color: Colors.white.useOpacity(0.6), fontSize: 16)), + Text( + 'Loading mining stats...', + style: TextStyle(color: Colors.white.useOpacity(0.6), fontSize: 16), + ), ], ), ); @@ -61,7 +69,12 @@ class _MinerStatsCardState extends State { borderRadius: 
BorderRadius.circular(24), border: Border.all(color: Colors.white.useOpacity(0.1), width: 1), boxShadow: [ - BoxShadow(color: Colors.black.useOpacity(0.2), blurRadius: 20, spreadRadius: 1, offset: const Offset(0, 8)), + BoxShadow( + color: Colors.black.useOpacity(0.2), + blurRadius: 20, + spreadRadius: 1, + offset: const Offset(0, 8), + ), ], ), child: Padding( @@ -83,12 +96,20 @@ class _MinerStatsCardState extends State { ), borderRadius: BorderRadius.circular(14), ), - child: const Icon(Icons.analytics, color: Colors.white, size: 24), + child: const Icon( + Icons.analytics, + color: Colors.white, + size: 24, + ), ), const SizedBox(width: 16), Text( 'Mining Performance - ${_miningStats!.chainName}', - style: TextStyle(fontSize: 18, fontWeight: FontWeight.w600, color: Colors.white.useOpacity(0.9)), + style: TextStyle( + fontSize: 18, + fontWeight: FontWeight.w600, + color: Colors.white.useOpacity(0.9), + ), ), ], ), @@ -100,12 +121,17 @@ class _MinerStatsCardState extends State { Expanded( child: Column( children: [ - _buildCompactStat(icon: Icons.people, label: 'Peers', value: '${_miningStats!.peerCount}'), + _buildCompactStat( + icon: Icons.people, + label: 'Peers', + value: '${_miningStats!.peerCount}', + ), const SizedBox(height: 16), _buildDualStat( icon: Icons.memory, label1: 'CPU', - value1: '${_miningStats!.workers} / ${_miningStats!.cpuCapacity}', + value1: + '${_miningStats!.workers} / ${_miningStats!.cpuCapacity}', label2: 'GPU', value2: '${_miningStats!.gpuDevices} / ${_miningStats!.gpuCapacity > 0 ? _miningStats!.gpuCapacity : (_miningStats!.gpuDevices > 0 ? 
_miningStats!.gpuDevices : "-")}', @@ -127,7 +153,8 @@ class _MinerStatsCardState extends State { _buildCompactStat( icon: Icons.block, label: 'Block', - value: '${_miningStats!.currentBlock} / ${_miningStats!.targetBlock}', + value: + '${_miningStats!.currentBlock} / ${_miningStats!.targetBlock}', ), ], ), @@ -197,7 +224,11 @@ class _MinerStatsCardState extends State { ], ), const SizedBox(width: 8), - Container(width: 1, height: 28, color: Colors.white.useOpacity(0.3)), + Container( + width: 1, + height: 28, + color: Colors.white.useOpacity(0.3), + ), const SizedBox(width: 8), Column( crossAxisAlignment: CrossAxisAlignment.start, @@ -232,7 +263,11 @@ class _MinerStatsCardState extends State { ); } - Widget _buildCompactStat({required IconData icon, required String label, required String value}) { + Widget _buildCompactStat({ + required IconData icon, + required String label, + required String value, + }) { return Row( children: [ Container( diff --git a/miner-app/lib/features/miner/miner_status.dart b/miner-app/lib/features/miner/miner_status.dart index 5afabe5f..48d63b27 100644 --- a/miner-app/lib/features/miner/miner_status.dart +++ b/miner-app/lib/features/miner/miner_status.dart @@ -16,7 +16,10 @@ class MinerStatus extends StatelessWidget { case MiningStatus.idle: return _StatusConfig( icon: Icons.pause_circle_outline, - colors: [const Color(0xFF64748B), const Color(0xFF475569)], // Slate gray + colors: [ + const Color(0xFF64748B), + const Color(0xFF475569), + ], // Slate gray glowColor: const Color(0xFF64748B), label: 'IDLE', ); @@ -80,7 +83,8 @@ class _StatusBadge extends StatefulWidget { State<_StatusBadge> createState() => _StatusBadgeState(); } -class _StatusBadgeState extends State<_StatusBadge> with TickerProviderStateMixin { +class _StatusBadgeState extends State<_StatusBadge> + with TickerProviderStateMixin { late AnimationController _rotationController; late AnimationController _pulseController; late Animation _pulseAnimation; @@ -90,16 +94,21 @@ 
class _StatusBadgeState extends State<_StatusBadge> with TickerProviderStateMixi super.initState(); // Rotation animation for syncing - _rotationController = AnimationController(duration: const Duration(seconds: 2), vsync: this); + _rotationController = AnimationController( + duration: const Duration(seconds: 2), + vsync: this, + ); // Pickaxe animation for mining (arcing back and forth) - _pulseController = AnimationController(duration: const Duration(milliseconds: 800), vsync: this); + _pulseController = AnimationController( + duration: const Duration(milliseconds: 800), + vsync: this, + ); // Arc rotation: -30 degrees to +30 degrees (in radians) - _pulseAnimation = Tween( - begin: -0.5, - end: 0.5, - ).animate(CurvedAnimation(parent: _pulseController, curve: Curves.easeInOut)); + _pulseAnimation = Tween(begin: -0.5, end: 0.5).animate( + CurvedAnimation(parent: _pulseController, curve: Curves.easeInOut), + ); _updateAnimations(); } @@ -152,7 +161,13 @@ class _StatusBadgeState extends State<_StatusBadge> with TickerProviderStateMixi end: Alignment.bottomRight, ), borderRadius: BorderRadius.circular(24), - boxShadow: [BoxShadow(color: widget.config.glowColor.useOpacity(0.4), blurRadius: 12, spreadRadius: 2)], + boxShadow: [ + BoxShadow( + color: widget.config.glowColor.useOpacity(0.4), + blurRadius: 12, + spreadRadius: 2, + ), + ], ), child: Row( mainAxisSize: MainAxisSize.min, @@ -164,8 +179,14 @@ class _StatusBadgeState extends State<_StatusBadge> with TickerProviderStateMixi ? (Matrix4.identity()..rotateZ(_pulseAnimation.value)) : Matrix4.identity(), child: RotationTransition( - turns: widget.config.isAnimated ? _rotationController : AlwaysStoppedAnimation(0), - child: Icon(widget.config.icon, color: Colors.white, size: 18), + turns: widget.config.isAnimated + ? 
_rotationController + : AlwaysStoppedAnimation(0), + child: Icon( + widget.config.icon, + color: Colors.white, + size: 18, + ), ), ), const SizedBox(width: 10), diff --git a/miner-app/lib/features/settings/settings_app_bar.dart b/miner-app/lib/features/settings/settings_app_bar.dart index 2402c167..323e0994 100644 --- a/miner-app/lib/features/settings/settings_app_bar.dart +++ b/miner-app/lib/features/settings/settings_app_bar.dart @@ -18,21 +18,37 @@ class _SettingsAppBarState extends State { floating: true, pinned: false, flexibleSpace: ClipRRect( - borderRadius: const BorderRadius.only(bottomLeft: Radius.circular(24), bottomRight: Radius.circular(24)), + borderRadius: const BorderRadius.only( + bottomLeft: Radius.circular(24), + bottomRight: Radius.circular(24), + ), child: BackdropFilter( - filter: ColorFilter.mode(Colors.black.useOpacity(0.1), BlendMode.srcOver), + filter: ColorFilter.mode( + Colors.black.useOpacity(0.1), + BlendMode.srcOver, + ), child: Container( decoration: BoxDecoration( gradient: LinearGradient( begin: Alignment.topCenter, end: Alignment.bottomCenter, - colors: [Colors.white.useOpacity(0.1), Colors.white.useOpacity(0.05)], + colors: [ + Colors.white.useOpacity(0.1), + Colors.white.useOpacity(0.05), + ], + ), + border: Border( + bottom: BorderSide( + color: Colors.white.useOpacity(0.1), + width: 1, + ), ), - border: Border(bottom: BorderSide(color: Colors.white.useOpacity(0.1), width: 1)), ), child: Padding( padding: const EdgeInsets.symmetric(horizontal: 20, vertical: 12), - child: Center(child: Text('Settings', style: context.textTheme.titleMedium)), + child: Center( + child: Text('Settings', style: context.textTheme.titleMedium), + ), ), ), ), diff --git a/miner-app/lib/features/settings/settings_screen.dart b/miner-app/lib/features/settings/settings_screen.dart index c354afb3..f77da330 100644 --- a/miner-app/lib/features/settings/settings_screen.dart +++ b/miner-app/lib/features/settings/settings_screen.dart @@ -61,8 +61,13 @@ class 
_SettingsScreenState extends State { context: context, builder: (context) => AlertDialog( backgroundColor: const Color(0xFF1C1C1C), - shape: RoundedRectangleBorder(borderRadius: BorderRadius.circular(16)), - title: const Text('Stop Mining?', style: TextStyle(color: Colors.white)), + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(16), + ), + title: const Text( + 'Stop Mining?', + style: TextStyle(color: Colors.white), + ), content: const Text( 'Changing the chain requires stopping mining first. ' 'Do you want to stop mining and switch chains?', @@ -71,11 +76,16 @@ class _SettingsScreenState extends State { actions: [ TextButton( onPressed: () => Navigator.of(context).pop(false), - child: Text('Cancel', style: TextStyle(color: Colors.white.useOpacity(0.7))), + child: Text( + 'Cancel', + style: TextStyle(color: Colors.white.useOpacity(0.7)), + ), ), TextButton( onPressed: () => Navigator.of(context).pop(true), - style: TextButton.styleFrom(foregroundColor: const Color(0xFF00E676)), + style: TextButton.styleFrom( + foregroundColor: const Color(0xFF00E676), + ), child: const Text('Stop & Switch'), ), ], @@ -99,7 +109,9 @@ class _SettingsScreenState extends State { // Show confirmation ScaffoldMessenger.of(context).showSnackBar( SnackBar( - content: Text('Switched to ${MinerConfig.getChainById(newChainId).displayName}'), + content: Text( + 'Switched to ${MinerConfig.getChainById(newChainId).displayName}', + ), backgroundColor: const Color(0xFF00E676), behavior: SnackBarBehavior.floating, ), @@ -136,7 +148,10 @@ class _SettingsScreenState extends State { SliverToBoxAdapter( child: Padding( - padding: const EdgeInsets.symmetric(horizontal: 20.0, vertical: 16.0), + padding: const EdgeInsets.symmetric( + horizontal: 20.0, + vertical: 16.0, + ), child: Column( crossAxisAlignment: CrossAxisAlignment.start, children: [ @@ -215,14 +230,23 @@ class _SettingsScreenState extends State { color: const Color(0xFF1C1C1C), // Slightly lighter than background 
borderRadius: BorderRadius.circular(16), border: Border.all(color: Colors.white.useOpacity(0.05), width: 1), - boxShadow: [BoxShadow(color: Colors.black.useOpacity(0.2), blurRadius: 10, offset: const Offset(0, 4))], + boxShadow: [ + BoxShadow( + color: Colors.black.useOpacity(0.2), + blurRadius: 10, + offset: const Offset(0, 4), + ), + ], ), child: Row( children: [ // Icon Container Container( padding: const EdgeInsets.all(10), - decoration: BoxDecoration(color: accentColor.useOpacity(0.1), borderRadius: BorderRadius.circular(12)), + decoration: BoxDecoration( + color: accentColor.useOpacity(0.1), + borderRadius: BorderRadius.circular(12), + ), child: Icon(icon, color: accentColor, size: 20), ), const SizedBox(width: 16), @@ -231,7 +255,11 @@ class _SettingsScreenState extends State { Expanded( child: Text( title, - style: const TextStyle(color: Colors.white, fontSize: 16, fontWeight: FontWeight.w500), + style: const TextStyle( + color: Colors.white, + fontSize: 16, + fontWeight: FontWeight.w500, + ), ), ), @@ -240,7 +268,10 @@ class _SettingsScreenState extends State { SizedBox( width: 16, height: 16, - child: CircularProgressIndicator(strokeWidth: 2, color: Colors.white.useOpacity(0.3)), + child: CircularProgressIndicator( + strokeWidth: 2, + color: Colors.white.useOpacity(0.3), + ), ) else Container( @@ -274,14 +305,23 @@ class _SettingsScreenState extends State { color: const Color(0xFF1C1C1C), borderRadius: BorderRadius.circular(16), border: Border.all(color: Colors.white.useOpacity(0.05), width: 1), - boxShadow: [BoxShadow(color: Colors.black.useOpacity(0.2), blurRadius: 10, offset: const Offset(0, 4))], + boxShadow: [ + BoxShadow( + color: Colors.black.useOpacity(0.2), + blurRadius: 10, + offset: const Offset(0, 4), + ), + ], ), child: Row( children: [ // Icon Container Container( padding: const EdgeInsets.all(10), - decoration: BoxDecoration(color: accentColor.useOpacity(0.1), borderRadius: BorderRadius.circular(12)), + decoration: BoxDecoration( + color: 
accentColor.useOpacity(0.1), + borderRadius: BorderRadius.circular(12), + ), child: Icon(Icons.link_rounded, color: accentColor, size: 20), ), const SizedBox(width: 16), @@ -293,10 +333,20 @@ class _SettingsScreenState extends State { children: [ const Text( 'Chain', - style: TextStyle(color: Colors.white, fontSize: 16, fontWeight: FontWeight.w500), + style: TextStyle( + color: Colors.white, + fontSize: 16, + fontWeight: FontWeight.w500, + ), ), const SizedBox(height: 2), - Text(selectedChain.description, style: TextStyle(color: Colors.white.useOpacity(0.5), fontSize: 12)), + Text( + selectedChain.description, + style: TextStyle( + color: Colors.white.useOpacity(0.5), + fontSize: 12, + ), + ), ], ), ), @@ -306,7 +356,10 @@ class _SettingsScreenState extends State { SizedBox( width: 16, height: 16, - child: CircularProgressIndicator(strokeWidth: 2, color: Colors.white.useOpacity(0.3)), + child: CircularProgressIndicator( + strokeWidth: 2, + color: Colors.white.useOpacity(0.3), + ), ) else Container( @@ -320,7 +373,10 @@ class _SettingsScreenState extends State { value: _selectedChainId, dropdownColor: const Color(0xFF1C1C1C), underline: const SizedBox(), - icon: Icon(Icons.arrow_drop_down, color: Colors.white.useOpacity(0.7)), + icon: Icon( + Icons.arrow_drop_down, + color: Colors.white.useOpacity(0.7), + ), style: TextStyle( color: Colors.white.useOpacity(0.9), fontFamily: 'Courier', @@ -328,7 +384,10 @@ class _SettingsScreenState extends State { fontSize: 13, ), items: MinerConfig.availableChains.map((chain) { - return DropdownMenuItem(value: chain.id, child: Text(chain.displayName)); + return DropdownMenuItem( + value: chain.id, + child: Text(chain.displayName), + ); }).toList(), onChanged: _onChainChanged, ), diff --git a/miner-app/lib/features/setup/node_identity_setup_screen.dart b/miner-app/lib/features/setup/node_identity_setup_screen.dart index c58f6604..55bc4d29 100644 --- a/miner-app/lib/features/setup/node_identity_setup_screen.dart +++ 
b/miner-app/lib/features/setup/node_identity_setup_screen.dart @@ -8,7 +8,8 @@ class NodeIdentitySetupScreen extends StatefulWidget { const NodeIdentitySetupScreen({super.key}); @override - State createState() => _NodeIdentitySetupScreenState(); + State createState() => + _NodeIdentitySetupScreenState(); } class _NodeIdentitySetupScreenState extends State { @@ -88,7 +89,10 @@ class _NodeIdentitySetupScreenState extends State { children: [ const Icon(Icons.check_circle, color: Colors.green, size: 80), const SizedBox(height: 16), - const Text('Node Identity Set!', style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold)), + const Text( + 'Node Identity Set!', + style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold), + ), const SizedBox(height: 24), ElevatedButton( onPressed: () { @@ -106,7 +110,10 @@ class _NodeIdentitySetupScreenState extends State { children: [ SvgPicture.asset('assets/logo/logo.svg', width: 80, height: 80), const SizedBox(height: 16), - const Text('Node Identity not set.', style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold)), + const Text( + 'Node Identity not set.', + style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold), + ), const SizedBox(height: 8), const Text( 'You need to set a node identity to continue.', diff --git a/miner-app/lib/features/setup/node_setup_screen.dart b/miner-app/lib/features/setup/node_setup_screen.dart index 6e86da7c..a5665651 100644 --- a/miner-app/lib/features/setup/node_setup_screen.dart +++ b/miner-app/lib/features/setup/node_setup_screen.dart @@ -36,7 +36,8 @@ class _NodeSetupScreenState extends State { final String nodeBinaryPath = await BinaryManager.getNodeBinaryFilePath(); final bool nodeInstalled = await File(nodeBinaryPath).exists(); - final String minerBinaryPath = await BinaryManager.getExternalMinerBinaryFilePath(); + final String minerBinaryPath = + await BinaryManager.getExternalMinerBinaryFilePath(); final bool minerInstalled = await File(minerBinaryPath).exists(); setState(() { @@ 
-78,12 +79,15 @@ class _NodeSetupScreenState extends State { if (mounted) { setState(() { if (progress.totalBytes > 0) { - _downloadProgress = progress.downloadedBytes / progress.totalBytes; + _downloadProgress = + progress.downloadedBytes / progress.totalBytes; _downloadProgressText = "Node: ${(progress.downloadedBytes / (1024 * 1024)).toStringAsFixed(2)} MB / ${(progress.totalBytes / (1024 * 1024)).toStringAsFixed(2)} MB"; } else { _downloadProgress = progress.downloadedBytes > 0 ? 1.0 : 0.0; - _downloadProgressText = progress.downloadedBytes > 0 ? "Node Downloaded" : "Downloading Node..."; + _downloadProgressText = progress.downloadedBytes > 0 + ? "Node Downloaded" + : "Downloading Node..."; } }); } @@ -110,12 +114,15 @@ class _NodeSetupScreenState extends State { if (mounted) { setState(() { if (progress.totalBytes > 0) { - _downloadProgress = progress.downloadedBytes / progress.totalBytes; + _downloadProgress = + progress.downloadedBytes / progress.totalBytes; _downloadProgressText = "Miner: ${(progress.downloadedBytes / (1024 * 1024)).toStringAsFixed(2)} MB / ${(progress.totalBytes / (1024 * 1024)).toStringAsFixed(2)} MB"; } else { _downloadProgress = progress.downloadedBytes > 0 ? 1.0 : 0.0; - _downloadProgressText = progress.downloadedBytes > 0 ? "Miner Downloaded" : "Downloading Miner..."; + _downloadProgressText = progress.downloadedBytes > 0 + ? 
"Miner Downloaded" + : "Downloading Miner..."; } }); } @@ -147,14 +154,15 @@ class _NodeSetupScreenState extends State { }); } if (mounted) { - ScaffoldMessenger.of( - context, - ).showSnackBar(SnackBar(content: Text('Error installing binaries: ${e.toString()}'))); + ScaffoldMessenger.of(context).showSnackBar( + SnackBar(content: Text('Error installing binaries: ${e.toString()}')), + ); } } } - bool get _allBinariesInstalled => _isNodeInstalled && _isExternalMinerInstalled; + bool get _allBinariesInstalled => + _isNodeInstalled && _isExternalMinerInstalled; @override Widget build(BuildContext context) { @@ -164,13 +172,22 @@ class _NodeSetupScreenState extends State { bodyContent = Column( mainAxisAlignment: MainAxisAlignment.center, children: [ - Text('Installing Mining Software...', style: Theme.of(context).textTheme.headlineSmall), + Text( + 'Installing Mining Software...', + style: Theme.of(context).textTheme.headlineSmall, + ), const SizedBox(height: 8), - Text(_currentDownloadingBinary, style: Theme.of(context).textTheme.titleMedium), + Text( + _currentDownloadingBinary, + style: Theme.of(context).textTheme.titleMedium, + ), const SizedBox(height: 20), Padding( padding: const EdgeInsets.symmetric(horizontal: 40.0), - child: LinearProgressIndicator(value: _downloadProgress, minHeight: 10), + child: LinearProgressIndicator( + value: _downloadProgress, + minHeight: 10, + ), ), const SizedBox(height: 10), Text(_downloadProgressText), @@ -196,7 +213,10 @@ class _NodeSetupScreenState extends State { children: [ const Icon(Icons.check_circle, color: Colors.green, size: 80), const SizedBox(height: 16), - const Text('Mining Software Installed!', style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold)), + const Text( + 'Mining Software Installed!', + style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold), + ), const SizedBox(height: 8), Column( children: [ @@ -237,7 +257,10 @@ class _NodeSetupScreenState extends State { children: [ 
SvgPicture.asset('assets/logo/logo.svg', width: 80, height: 80), const SizedBox(height: 16), - const Text('Mining software not found.', style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold)), + const Text( + 'Mining software not found.', + style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold), + ), const SizedBox(height: 8), const Text( 'You need to install the node and external miner to continue.', @@ -279,7 +302,9 @@ class _NodeSetupScreenState extends State { ElevatedButton.icon( onPressed: _installBinaries, icon: const Icon(Icons.download), - label: Text(_allBinariesInstalled ? 'All Installed' : 'Install Mining Software'), + label: Text( + _allBinariesInstalled ? 'All Installed' : 'Install Mining Software', + ), style: ElevatedButton.styleFrom( padding: const EdgeInsets.symmetric(horizontal: 24, vertical: 12), textStyle: const TextStyle(fontSize: 18), diff --git a/miner-app/lib/features/setup/rewards_address_setup_screen.dart b/miner-app/lib/features/setup/rewards_address_setup_screen.dart index dbe44c53..36b9118e 100644 --- a/miner-app/lib/features/setup/rewards_address_setup_screen.dart +++ b/miner-app/lib/features/setup/rewards_address_setup_screen.dart @@ -1,96 +1,137 @@ -import 'dart:io'; - import 'package:flash/flash_helper.dart'; import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; import 'package:flutter_svg/flutter_svg.dart'; import 'package:go_router/go_router.dart'; -import 'package:quantus_miner/src/services/binary_manager.dart'; +import 'package:quantus_miner/src/services/miner_wallet_service.dart'; import 'package:quantus_miner/src/shared/extensions/snackbar_extensions.dart'; import 'package:quantus_sdk/quantus_sdk.dart'; +/// Setup screen for miner wallet (mnemonic-based wormhole address). +/// +/// Users can either generate a new 24-word mnemonic or import an existing one. +/// The mnemonic is used to derive a wormhole address where mining rewards are sent. 
class RewardsAddressSetupScreen extends StatefulWidget { const RewardsAddressSetupScreen({super.key}); @override - State createState() => _RewardsAddressSetupScreenState(); + State createState() => + _RewardsAddressSetupScreenState(); } +enum _ImportMode { mnemonic, preimage } + class _RewardsAddressSetupScreenState extends State { - bool _isLoading = true; - final TextEditingController _addressController = TextEditingController(); - final FocusNode _focusNode = FocusNode(); + final MinerWalletService _walletService = MinerWalletService(); - @override - void initState() { - super.initState(); - _checkRewardsAddress(); - _addressController.addListener(() { - if (mounted) setState(() {}); - }); - } + bool _isLoading = false; + bool _showImportView = false; + _ImportMode _importMode = _ImportMode.mnemonic; + + // Generated mnemonic flow + String? _generatedMnemonic; + bool _mnemonicConfirmed = false; + + // Import mnemonic flow + final TextEditingController _importController = TextEditingController(); + final FocusNode _importFocusNode = FocusNode(); + String? _importError; + + // Result after saving + WormholeKeyPair? _savedKeyPair; + + // For preimage-only flow (no keypair available) + String? _savedPreimageOnly; @override void dispose() { - _addressController.dispose(); - _focusNode.dispose(); + _importController.dispose(); + _importFocusNode.dispose(); super.dispose(); } - Future _checkRewardsAddress() async { + /// Generate a new 24-word mnemonic. + void _generateNewMnemonic() { + setState(() { + _generatedMnemonic = _walletService.generateMnemonic(); + _mnemonicConfirmed = false; + }); + } + + /// Save the generated mnemonic and derive the wormhole address. 
+ Future _saveGeneratedMnemonic() async { + if (_generatedMnemonic == null) return; + setState(() { _isLoading = true; }); try { - final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); - final rewardsFile = File('$quantusHome/rewards-address.txt'); - - if (await rewardsFile.exists()) { - final address = await rewardsFile.readAsString(); - if (address.trim().isNotEmpty) { - setState(() { - _addressController.text = address.trim(); - }); - print('Rewards address found: $address'); - } - } + final keyPair = await _walletService.saveMnemonic(_generatedMnemonic!); + setState(() { + _savedKeyPair = keyPair; + }); } catch (e) { - print('Error checking rewards address: $e'); + if (mounted) { + context.showErrorSnackbar( + title: 'Error', + message: 'Failed to save wallet: $e', + ); + } } finally { + if (mounted) { + setState(() { + _isLoading = false; + }); + } + } + } + + /// Validate and save an imported mnemonic. + Future _saveImportedMnemonic() async { + final mnemonic = _importController.text.trim(); + + if (mnemonic.isEmpty) { setState(() { - _isLoading = false; + _importError = 'Please enter your recovery phrase'; }); + return; } - } - Future _saveRewardsAddress() async { - final address = _addressController.text.trim(); - if (address.isEmpty) { - context.showErrorSnackbar(title: 'Error', message: 'Please enter a valid address'); + // Validate word count + final words = mnemonic.split(RegExp(r'\s+')); + if (words.length != 24) { + setState(() { + _importError = + 'Recovery phrase must be exactly 24 words (got ${words.length})'; + }); + return; + } + + // Validate using MinerWalletService + if (!_walletService.validateMnemonic(mnemonic)) { + setState(() { + _importError = 'Invalid recovery phrase. 
Please check your words.'; + }); return; } setState(() { _isLoading = true; + _importError = null; }); try { - final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); - final rewardsFile = File('$quantusHome/rewards-address.txt'); - await rewardsFile.writeAsString(address); - - print('Rewards address saved: $address'); - - if (mounted) { - context.showSuccessBar(content: const Text('Rewards address saved successfully!')); - // Navigate to the main mining screen - context.go('/miner_dashboard'); - } + final keyPair = await _walletService.saveMnemonic(mnemonic); + setState(() { + _savedKeyPair = keyPair; + }); } catch (e) { - print('Error saving rewards address: $e'); if (mounted) { - context.showErrorSnackbar(title: 'Error', message: 'Error saving address: $e'); + context.showErrorSnackbar( + title: 'Error', + message: 'Failed to save wallet: $e', + ); } } finally { if (mounted) { @@ -101,187 +142,688 @@ class _RewardsAddressSetupScreenState extends State { } } - void _showQrOverlay() { - showDialog( - context: context, - builder: (BuildContext context) { - return Stack( + /// Continue to the miner dashboard. + void _continueToMining() { + context.go('/miner_dashboard'); + } + + /// Copy text to clipboard with feedback. + Future _copyToClipboard(String text, String label) async { + await Clipboard.setData(ClipboardData(text: text)); + if (mounted) { + context.showSuccessBar(content: Text('$label copied to clipboard')); + } + } + + @override + Widget build(BuildContext context) { + final canGoBack = + _showImportView && _savedKeyPair == null && _savedPreimageOnly == null; + + return Scaffold( + appBar: AppBar( + title: const Text('Wallet Setup'), + leading: canGoBack + ? IconButton( + icon: const Icon(Icons.arrow_back), + onPressed: () { + setState(() { + _showImportView = false; + _importController.clear(); + _importError = null; + _importMode = _ImportMode.mnemonic; + }); + }, + ) + : null, + ), + body: _isLoading + ? 
const Center(child: CircularProgressIndicator()) + : _savedKeyPair != null + ? _buildSuccessView() + : _savedPreimageOnly != null + ? _buildPreimageOnlySuccessView() + : _showImportView + ? _buildImportView() + : _generatedMnemonic != null + ? _buildGeneratedMnemonicView() + : _buildInitialChoiceView(), + ); + } + + /// Initial view: Choose to generate or import a wallet. + Widget _buildInitialChoiceView() { + return Center( + child: SingleChildScrollView( + padding: const EdgeInsets.all(24.0), + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + crossAxisAlignment: CrossAxisAlignment.stretch, children: [ - // Semi-transparent dark background handled by Dialog's barrierColor, - // but we can ensure high contrast content here - Center( - child: Material( - color: Colors.transparent, - child: Container( - padding: const EdgeInsets.all(24), - margin: const EdgeInsets.all(24), - decoration: BoxDecoration( - color: Colors.black87, - borderRadius: BorderRadius.circular(16), - boxShadow: [ - BoxShadow(color: Colors.black.useOpacity(0.5), blurRadius: 20, offset: const Offset(0, 10)), - ], + SvgPicture.asset('assets/logo/logo.svg', width: 80, height: 80), + const SizedBox(height: 24), + const Text( + 'Set Up Rewards Wallet', + style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold), + textAlign: TextAlign.center, + ), + const SizedBox(height: 8), + const Text( + 'Your mining rewards will be sent to a wormhole address derived from a recovery phrase.', + textAlign: TextAlign.center, + style: TextStyle(fontSize: 16, color: Colors.grey), + ), + const SizedBox(height: 48), + ElevatedButton.icon( + onPressed: _generateNewMnemonic, + icon: const Icon(Icons.add_circle_outline), + label: const Text('Create New Wallet'), + style: ElevatedButton.styleFrom( + padding: const EdgeInsets.symmetric(vertical: 16), + textStyle: const TextStyle(fontSize: 18), + ), + ), + const SizedBox(height: 16), + OutlinedButton.icon( + onPressed: () { + setState(() { + _showImportView = 
true; + }); + }, + icon: const Icon(Icons.download), + label: const Text('Import Existing Wallet'), + style: OutlinedButton.styleFrom( + padding: const EdgeInsets.symmetric(vertical: 16), + textStyle: const TextStyle(fontSize: 18), + ), + ), + const SizedBox(height: 48), + Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Colors.amber.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(12), + border: Border.all(color: Colors.amber.withValues(alpha: 0.3)), + ), + child: Row( + children: [ + const Icon(Icons.info_outline, color: Colors.amber), + const SizedBox(width: 12), + Expanded( + child: Text( + 'If you already have a Quantus mobile wallet, you can use the same recovery phrase to receive rewards to the same account.', + style: TextStyle( + fontSize: 14, + color: Colors.amber.shade200, + ), + ), + ), + ], + ), + ), + ], + ), + ), + ); + } + + /// View showing the generated mnemonic for backup. + Widget _buildGeneratedMnemonicView() { + final words = _generatedMnemonic!.split(' '); + + return SingleChildScrollView( + padding: const EdgeInsets.all(24.0), + child: Column( + crossAxisAlignment: CrossAxisAlignment.stretch, + children: [ + const Icon(Icons.security, size: 48, color: Colors.amber), + const SizedBox(height: 16), + const Text( + 'Write Down Your Recovery Phrase', + style: TextStyle(fontSize: 22, fontWeight: FontWeight.bold), + textAlign: TextAlign.center, + ), + const SizedBox(height: 8), + const Text( + 'Store these 24 words safely. 
You will need them to recover your wallet and withdraw mining rewards.', + textAlign: TextAlign.center, + style: TextStyle(fontSize: 14, color: Colors.grey), + ), + const SizedBox(height: 24), + Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Colors.grey.shade900, + borderRadius: BorderRadius.circular(12), + border: Border.all(color: Colors.grey.shade700), + ), + child: Column( + children: [ + // Grid of words + GridView.builder( + shrinkWrap: true, + physics: const NeverScrollableScrollPhysics(), + gridDelegate: const SliverGridDelegateWithFixedCrossAxisCount( + crossAxisCount: 3, + childAspectRatio: 2.5, + crossAxisSpacing: 8, + mainAxisSpacing: 8, ), - child: Column( - mainAxisSize: MainAxisSize.min, - children: [ - Stack( + itemCount: words.length, + itemBuilder: (context, index) { + return Container( + padding: const EdgeInsets.symmetric( + horizontal: 8, + vertical: 4, + ), + decoration: BoxDecoration( + color: Colors.grey.shade800, + borderRadius: BorderRadius.circular(6), + ), + child: Row( children: [ - Container( - width: 40, // spacer for alignment + Text( + '${index + 1}.', + style: TextStyle( + color: Colors.grey.shade500, + fontSize: 12, + ), ), - Positioned( - right: 0, - top: 0, - child: GestureDetector( - onTap: () => Navigator.of(context).pop(), - child: const Icon(Icons.close, color: Colors.white, size: 24), + const SizedBox(width: 4), + Expanded( + child: Text( + words[index], + style: const TextStyle( + fontWeight: FontWeight.w500, + fontSize: 13, + ), + overflow: TextOverflow.ellipsis, ), ), ], ), - const SizedBox(height: 24), - Container( - padding: const EdgeInsets.all(16), - decoration: BoxDecoration( - color: Colors.black, - borderRadius: BorderRadius.circular(12), - border: Border.all(color: Colors.white24), - ), - child: Image.asset( - 'assets/tr-ee-u1vxT1-qrcode-white.png', // White QR on dark bg - width: 250, - height: 250, - ), - ), - const SizedBox(height: 24), - const Text( - 'Scan with your 
mobile phone\nto set up your wallet', - textAlign: TextAlign.center, - style: TextStyle(color: Colors.white, fontSize: 16, fontWeight: FontWeight.w500), - ), - const SizedBox(height: 24), - OutlinedButton( - onPressed: () => Navigator.of(context).pop(), - style: OutlinedButton.styleFrom( - foregroundColor: Colors.white, - side: const BorderSide(color: Colors.white), - padding: const EdgeInsets.symmetric(horizontal: 32, vertical: 16), - ), - child: const Text('Close'), - ), - ], + ); + }, + ), + const SizedBox(height: 12), + TextButton.icon( + onPressed: () => + _copyToClipboard(_generatedMnemonic!, 'Recovery phrase'), + icon: const Icon(Icons.copy, size: 18), + label: const Text('Copy to clipboard'), + ), + ], + ), + ), + const SizedBox(height: 24), + Container( + padding: const EdgeInsets.all(12), + decoration: BoxDecoration( + color: Colors.red.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(8), + border: Border.all(color: Colors.red.withValues(alpha: 0.3)), + ), + child: Row( + children: [ + const Icon(Icons.warning_amber, color: Colors.red, size: 20), + const SizedBox(width: 8), + Expanded( + child: Text( + 'Never share your recovery phrase. Anyone with these words can access your funds.', + style: TextStyle(fontSize: 13, color: Colors.red.shade200), ), ), - ), + ], ), - ], - ); - }, + ), + const SizedBox(height: 24), + CheckboxListTile( + value: _mnemonicConfirmed, + onChanged: (value) { + setState(() { + _mnemonicConfirmed = value ?? false; + }); + }, + title: const Text( + 'I have written down my recovery phrase and stored it safely', + style: TextStyle(fontSize: 14), + ), + controlAffinity: ListTileControlAffinity.leading, + contentPadding: EdgeInsets.zero, + ), + const SizedBox(height: 16), + ElevatedButton( + onPressed: _mnemonicConfirmed ? 
_saveGeneratedMnemonic : null, + style: ElevatedButton.styleFrom( + padding: const EdgeInsets.symmetric(vertical: 16), + textStyle: const TextStyle(fontSize: 18), + ), + child: const Text('Continue'), + ), + const SizedBox(height: 8), + TextButton( + onPressed: () { + setState(() { + _generatedMnemonic = null; + _mnemonicConfirmed = false; + }); + }, + child: const Text('Go Back'), + ), + ], + ), ); } - @override - Widget build(BuildContext context) { - return Scaffold( - appBar: AppBar(title: const Text('Rewards Address Setup')), - body: Center( - child: _isLoading - ? const CircularProgressIndicator() - : SingleChildScrollView( - padding: const EdgeInsets.all(24.0), - child: Column( - mainAxisAlignment: MainAxisAlignment.center, - crossAxisAlignment: CrossAxisAlignment.stretch, + /// Validate and save an imported preimage (no mnemonic). + Future _saveImportedPreimage() async { + final preimage = _importController.text.trim(); + + if (preimage.isEmpty) { + setState(() { + _importError = 'Please enter your rewards preimage'; + }); + return; + } + + if (!_walletService.validatePreimage(preimage)) { + setState(() { + _importError = + 'Invalid preimage format. Expected SS58-encoded address.'; + }); + return; + } + + setState(() { + _isLoading = true; + _importError = null; + }); + + try { + await _walletService.savePreimageOnly(preimage); + setState(() { + _savedPreimageOnly = preimage; + }); + } catch (e) { + if (mounted) { + context.showErrorSnackbar( + title: 'Error', + message: 'Failed to save preimage: $e', + ); + } + } finally { + if (mounted) { + setState(() { + _isLoading = false; + }); + } + } + } + + /// View for importing an existing mnemonic or preimage. + Widget _buildImportView() { + return Center( + child: SingleChildScrollView( + padding: const EdgeInsets.all(24.0), + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + crossAxisAlignment: CrossAxisAlignment.stretch, + children: [ + Icon( + _importMode == _ImportMode.mnemonic ? 
Icons.download : Icons.key, + size: 48, + color: Colors.blue, + ), + const SizedBox(height: 16), + Text( + _importMode == _ImportMode.mnemonic + ? 'Import Recovery Phrase' + : 'Import Rewards Preimage', + style: const TextStyle(fontSize: 22, fontWeight: FontWeight.bold), + textAlign: TextAlign.center, + ), + const SizedBox(height: 8), + Text( + _importMode == _ImportMode.mnemonic + ? 'Enter your 24-word recovery phrase to restore your wallet.' + : 'Enter your rewards preimage (SS58 format) from the CLI or another source.', + textAlign: TextAlign.center, + style: const TextStyle(fontSize: 14, color: Colors.grey), + ), + const SizedBox(height: 24), + + // Toggle between mnemonic and preimage mode + SegmentedButton<_ImportMode>( + segments: const [ + ButtonSegment( + value: _ImportMode.mnemonic, + label: Text('Recovery Phrase'), + icon: Icon(Icons.vpn_key), + ), + ButtonSegment( + value: _ImportMode.preimage, + label: Text('Preimage Only'), + icon: Icon(Icons.key), + ), + ], + selected: {_importMode}, + onSelectionChanged: (selected) { + setState(() { + _importMode = selected.first; + _importController.clear(); + _importError = null; + }); + }, + ), + const SizedBox(height: 24), + + TextField( + controller: _importController, + focusNode: _importFocusNode, + maxLines: _importMode == _ImportMode.mnemonic ? 4 : 2, + decoration: InputDecoration( + labelText: _importMode == _ImportMode.mnemonic + ? 'Recovery Phrase' + : 'Rewards Preimage', + hintText: _importMode == _ImportMode.mnemonic + ? 
'Enter your 24 words separated by spaces' + : 'e.g., qXYZ123...', + border: const OutlineInputBorder(), + errorText: _importError, + suffixIcon: IconButton( + icon: const Icon(Icons.paste), + onPressed: () async { + final data = await Clipboard.getData(Clipboard.kTextPlain); + if (data?.text != null) { + _importController.text = data!.text!; + setState(() { + _importError = null; + }); + } + }, + tooltip: 'Paste from clipboard', + ), + ), + onChanged: (_) { + if (_importError != null) { + setState(() { + _importError = null; + }); + } + }, + ), + + // Warning for preimage-only mode + if (_importMode == _ImportMode.preimage) ...[ + const SizedBox(height: 16), + Container( + padding: const EdgeInsets.all(12), + decoration: BoxDecoration( + color: Colors.amber.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(8), + border: Border.all( + color: Colors.amber.withValues(alpha: 0.3), + ), + ), + child: Row( children: [ - SvgPicture.asset('assets/logo/logo.svg', width: 80, height: 80), - const SizedBox(height: 24), - const Text( - 'Add Rewards Account', - style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold), - textAlign: TextAlign.center, + const Icon( + Icons.info_outline, + color: Colors.amber, + size: 20, ), - const SizedBox(height: 8), - const Text( - 'Your minted coins will go there.', - textAlign: TextAlign.center, - style: TextStyle(fontSize: 16, color: Colors.grey), - ), - const SizedBox(height: 32), - TextField( - controller: _addressController, - focusNode: _focusNode, - autofocus: true, - enableInteractiveSelection: true, - onSubmitted: (_) => _saveRewardsAddress(), - contextMenuBuilder: (context, editableTextState) { - return AdaptiveTextSelectionToolbar.editableText(editableTextState: editableTextState); - }, - decoration: InputDecoration( - labelText: 'Rewards Wallet Address', - border: const OutlineInputBorder(), - hintText: 'Paste your address here', - prefixIcon: const Icon(Icons.account_balance_wallet), - suffixIcon: Row( - 
mainAxisSize: MainAxisSize.min, - children: [ - if (_addressController.text.isNotEmpty) - IconButton( - icon: const Icon(Icons.clear), - onPressed: () { - _addressController.clear(); - }, - tooltip: 'Clear', - ), - IconButton( - icon: const Icon(Icons.paste), - onPressed: () async { - final data = await Clipboard.getData(Clipboard.kTextPlain); - if (data?.text != null) { - _addressController.text = data!.text!; - } - }, - tooltip: 'Paste', - ), - ], + const SizedBox(width: 8), + Expanded( + child: Text( + 'Without the recovery phrase, you cannot withdraw rewards from this app. Use this option only if you plan to withdraw using the CLI.', + style: TextStyle( + fontSize: 13, + color: Colors.amber.shade200, ), ), - maxLines: 1, - ), - const SizedBox(height: 24), - ElevatedButton.icon( - onPressed: _saveRewardsAddress, - icon: const Icon(Icons.save), - label: const Text('Set Rewards Address'), - style: ElevatedButton.styleFrom( - padding: const EdgeInsets.symmetric(vertical: 16), - textStyle: const TextStyle(fontSize: 18), - ), - ), - const SizedBox(height: 48), - const Divider(), - const SizedBox(height: 24), - const Text( - "Don't have an account?", - textAlign: TextAlign.center, - style: TextStyle(fontSize: 18, fontWeight: FontWeight.bold), ), - const SizedBox(height: 8), - const Text( - 'Create one in the mobile wallet.', - textAlign: TextAlign.center, - style: TextStyle(fontSize: 16, color: Colors.grey), + ], + ), + ), + ], + + const SizedBox(height: 24), + ElevatedButton( + onPressed: _importMode == _ImportMode.mnemonic + ? _saveImportedMnemonic + : _saveImportedPreimage, + style: ElevatedButton.styleFrom( + padding: const EdgeInsets.symmetric(vertical: 16), + textStyle: const TextStyle(fontSize: 18), + ), + child: Text( + _importMode == _ImportMode.mnemonic + ? 'Import Wallet' + : 'Save Preimage', + ), + ), + ], + ), + ), + ); + } + + /// Success view for preimage-only import (no mnemonic). 
+ Widget _buildPreimageOnlySuccessView() { + return SingleChildScrollView( + padding: const EdgeInsets.all(24.0), + child: Column( + crossAxisAlignment: CrossAxisAlignment.stretch, + children: [ + const Icon(Icons.check_circle, size: 64, color: Colors.green), + const SizedBox(height: 16), + const Text( + 'Preimage Saved!', + style: TextStyle(fontSize: 22, fontWeight: FontWeight.bold), + textAlign: TextAlign.center, + ), + const SizedBox(height: 8), + const Text( + 'Your mining rewards will be directed using this preimage.', + textAlign: TextAlign.center, + style: TextStyle(fontSize: 14, color: Colors.grey), + ), + const SizedBox(height: 32), + + // Rewards Preimage + _buildInfoCard( + title: 'Rewards Preimage', + subtitle: 'Used by the node to direct rewards', + value: _savedPreimageOnly!, + icon: Icons.key, + color: Colors.blue, + ), + const SizedBox(height: 24), + + Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Colors.amber.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(12), + border: Border.all(color: Colors.amber.withValues(alpha: 0.3)), + ), + child: Row( + children: [ + const Icon(Icons.warning_amber, color: Colors.amber), + const SizedBox(width: 12), + Expanded( + child: Text( + 'Without your recovery phrase, you cannot withdraw rewards from this app. 
Make sure you have access to your secret via the CLI or another tool.', + style: TextStyle( + fontSize: 14, + color: Colors.amber.shade200, ), - const SizedBox(height: 16), - OutlinedButton.icon( - onPressed: _showQrOverlay, - icon: const Icon(Icons.qr_code), - label: const Text('Scan QR code to set up wallet'), - style: OutlinedButton.styleFrom(padding: const EdgeInsets.symmetric(vertical: 12)), + ), + ), + ], + ), + ), + const SizedBox(height: 32), + + ElevatedButton.icon( + onPressed: _continueToMining, + icon: const Icon(Icons.rocket_launch), + label: const Text('Start Mining'), + style: ElevatedButton.styleFrom( + padding: const EdgeInsets.symmetric(vertical: 16), + textStyle: const TextStyle(fontSize: 18), + backgroundColor: Colors.green, + foregroundColor: Colors.white, + ), + ), + ], + ), + ); + } + + /// Success view showing the derived wormhole address and rewards preimage. + Widget _buildSuccessView() { + return SingleChildScrollView( + padding: const EdgeInsets.all(24.0), + child: Column( + crossAxisAlignment: CrossAxisAlignment.stretch, + children: [ + const Icon(Icons.check_circle, size: 64, color: Colors.green), + const SizedBox(height: 16), + const Text( + 'Wallet Created Successfully!', + style: TextStyle(fontSize: 22, fontWeight: FontWeight.bold), + textAlign: TextAlign.center, + ), + const SizedBox(height: 8), + const Text( + 'Your mining rewards will be sent to this wormhole address.', + textAlign: TextAlign.center, + style: TextStyle(fontSize: 14, color: Colors.grey), + ), + const SizedBox(height: 32), + + // Wormhole Address + _buildInfoCard( + title: 'Wormhole Address', + subtitle: 'Where your mining rewards go', + value: _savedKeyPair!.address, + icon: Icons.account_balance_wallet, + color: Colors.green, + ), + const SizedBox(height: 16), + + // Rewards Preimage + _buildInfoCard( + title: 'Rewards Preimage', + subtitle: 'Used by the node (auto-configured)', + value: _savedKeyPair!.rewardsPreimage, + icon: Icons.key, + color: Colors.blue, + 
), + const SizedBox(height: 32), + + Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Colors.green.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(12), + border: Border.all(color: Colors.green.withValues(alpha: 0.3)), + ), + child: Row( + children: [ + const Icon(Icons.info_outline, color: Colors.green), + const SizedBox(width: 12), + Expanded( + child: Text( + 'The rewards preimage has been saved automatically. The mining node will use it to direct rewards to your wormhole address.', + style: TextStyle( + fontSize: 14, + color: Colors.green.shade200, ), - ], + ), + ), + ], + ), + ), + const SizedBox(height: 32), + + ElevatedButton.icon( + onPressed: _continueToMining, + icon: const Icon(Icons.rocket_launch), + label: const Text('Start Mining'), + style: ElevatedButton.styleFrom( + padding: const EdgeInsets.symmetric(vertical: 16), + textStyle: const TextStyle(fontSize: 18), + backgroundColor: Colors.green, + foregroundColor: Colors.white, + ), + ), + ], + ), + ); + } + + /// Helper widget for displaying address/preimage info cards. 
+ Widget _buildInfoCard({ + required String title, + required String subtitle, + required String value, + required IconData icon, + required Color color, + }) { + return Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Colors.grey.shade900, + borderRadius: BorderRadius.circular(12), + border: Border.all(color: Colors.grey.shade700), + ), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + children: [ + Icon(icon, color: color, size: 20), + const SizedBox(width: 8), + Text( + title, + style: const TextStyle( + fontWeight: FontWeight.bold, + fontSize: 16, ), ), + ], + ), + const SizedBox(height: 4), + Text( + subtitle, + style: TextStyle(fontSize: 12, color: Colors.grey.shade500), + ), + const SizedBox(height: 12), + Container( + width: double.infinity, + padding: const EdgeInsets.all(12), + decoration: BoxDecoration( + color: Colors.grey.shade800, + borderRadius: BorderRadius.circular(8), + ), + child: SelectableText( + value, + style: const TextStyle(fontFamily: 'monospace', fontSize: 13), + ), + ), + const SizedBox(height: 8), + Align( + alignment: Alignment.centerRight, + child: TextButton.icon( + onPressed: () => _copyToClipboard(value, title), + icon: const Icon(Icons.copy, size: 16), + label: const Text('Copy'), + style: TextButton.styleFrom(foregroundColor: color), + ), + ), + ], ), ); } diff --git a/miner-app/lib/features/withdrawal/withdrawal_screen.dart b/miner-app/lib/features/withdrawal/withdrawal_screen.dart new file mode 100644 index 00000000..f48fadb3 --- /dev/null +++ b/miner-app/lib/features/withdrawal/withdrawal_screen.dart @@ -0,0 +1,970 @@ +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart'; +import 'package:go_router/go_router.dart'; +import 'package:quantus_miner/src/services/miner_settings_service.dart'; +import 'package:quantus_miner/src/services/transfer_tracking_service.dart'; +import 
'package:quantus_miner/src/services/withdrawal_service.dart'; +import 'package:quantus_miner/src/services/wormhole_address_manager.dart'; +import 'package:quantus_miner/src/shared/extensions/snackbar_extensions.dart'; +import 'package:quantus_miner/src/utils/app_logger.dart'; +import 'package:quantus_sdk/quantus_sdk.dart' + hide WormholeAddressManager, TrackedWormholeAddress, WormholeAddressPurpose; + +final _log = log.withTag('Withdrawal'); + +/// Screen for withdrawing mining rewards from wormhole address. +class WithdrawalScreen extends StatefulWidget { + /// Available balance in planck (12 decimals) + final BigInt availableBalance; + + /// Wormhole address where rewards are stored + final String wormholeAddress; + + /// Secret hex for proof generation + final String secretHex; + + const WithdrawalScreen({ + super.key, + required this.availableBalance, + required this.wormholeAddress, + required this.secretHex, + }); + + @override + State createState() => _WithdrawalScreenState(); +} + +class _WithdrawalScreenState extends State { + final _formKey = GlobalKey(); + final _destinationController = TextEditingController(); + final _amountController = TextEditingController(); + + bool _isWithdrawing = false; + bool _withdrawAll = true; + String? 
_error; + double _progress = 0; + String _statusMessage = ''; + + // Circuit status + final _circuitManager = CircuitManager(); + CircuitStatus _circuitStatus = CircuitStatus.unavailable; + + // Transfer tracking + final _transferTrackingService = TransferTrackingService(); + List _trackedTransfers = []; + bool _hasLoadedTransfers = false; + + // Address manager for change addresses + final _addressManager = WormholeAddressManager(); + bool _addressManagerReady = false; + + // Fee is 10 basis points (0.1%) + static const int _feeBps = 10; + + @override + void initState() { + super.initState(); + // Default to max amount + _updateAmountToMax(); + // Check circuit availability + _checkCircuits(); + // Load tracked transfers + _loadTrackedTransfers(); + // Initialize address manager for change addresses + _initAddressManager(); + } + + Future _initAddressManager() async { + try { + await _addressManager.initialize(); + if (mounted) { + setState(() { + _addressManagerReady = true; + }); + } + _log.i( + 'Address manager initialized with ${_addressManager.allAddresses.length} addresses', + ); + } catch (e) { + _log.e('Failed to initialize address manager', error: e); + // Still mark as ready so full withdrawals can proceed + if (mounted) { + setState(() { + _addressManagerReady = true; + }); + } + } + } + + Future _loadTrackedTransfers() async { + try { + // Wait for address manager to be ready + if (!_addressManagerReady) { + await _addressManager.initialize(); + } + + // Initialize the tracking service with current chain config + final settingsService = MinerSettingsService(); + final chainConfig = await settingsService.getChainConfig(); + + // Get all known addresses (primary + change addresses) + final allAddresses = _addressManager.allAddressStrings; + final addressesToTrack = allAddresses.isNotEmpty + ? 
allAddresses + : {widget.wormholeAddress}; + + _transferTrackingService.initialize( + rpcUrl: chainConfig.rpcUrl, + wormholeAddresses: addressesToTrack, + ); + + // Load from disk first + await _transferTrackingService.loadFromDisk(); + + // Get unspent transfers for ALL tracked addresses + final allTransfers = []; + + // Check primary address + final primaryTransfers = await _transferTrackingService + .getUnspentTransfers( + wormholeAddress: widget.wormholeAddress, + secretHex: widget.secretHex, + ); + allTransfers.addAll(primaryTransfers); + _log.i( + 'Primary address ${widget.wormholeAddress}: ${primaryTransfers.length} unspent', + ); + + // Check change addresses from address manager + for (final tracked in _addressManager.allAddresses) { + if (tracked.address == widget.wormholeAddress) continue; // Skip primary + + final transfers = await _transferTrackingService.getUnspentTransfers( + wormholeAddress: tracked.address, + secretHex: tracked.secretHex, + ); + if (transfers.isNotEmpty) { + allTransfers.addAll(transfers); + _log.i( + 'Change address ${tracked.address}: ${transfers.length} unspent', + ); + } + } + + if (mounted) { + setState(() { + _trackedTransfers = allTransfers; + _hasLoadedTransfers = true; + }); + // Update amount field now that we know the real withdrawable balance + _updateAmountToMax(); + } + + _log.i( + 'Loaded ${allTransfers.length} total tracked transfers for withdrawal', + ); + } catch (e) { + _log.e('Failed to load tracked transfers', error: e); + if (mounted) { + setState(() { + _hasLoadedTransfers = true; // Mark as loaded even on error + }); + } + } + } + + Future _checkCircuits() async { + final status = await _circuitManager.checkStatus(); + if (mounted) { + setState(() { + _circuitStatus = status; + }); + } + } + + /// Extract circuit files if needed. Returns true if circuits are ready. 
+ Future _ensureCircuitsExtracted() async { + // Check if already available + if (_circuitStatus.isAvailable && _circuitStatus.circuitDir != null) { + return true; + } + + _log.i('Circuits not available, extracting from assets...'); + setState(() { + _progress = 0.05; + _statusMessage = 'Extracting circuit files (one-time setup)...'; + }); + + bool success = false; + try { + success = await _circuitManager.extractCircuitsFromAssets( + onProgress: (progress, message) { + _log.d('Circuit extraction progress: $progress - $message'); + if (mounted) { + setState(() { + // Scale extraction progress to 0-20% of total withdrawal progress + _progress = progress * 0.2; + _statusMessage = message; + }); + } + }, + ); + _log.i('Circuit extraction finished. Success: $success'); + } catch (e) { + _log.e('Circuit extraction threw exception', error: e); + success = false; + } + + if (!mounted) return false; + + // Update circuit status + final status = await _circuitManager.checkStatus(); + setState(() { + _circuitStatus = status; + }); + + if (!success || !status.isAvailable) { + setState(() { + _error = 'Failed to extract circuit files. 
Please try again.'; + }); + return false; + } + + return true; + } + + @override + void dispose() { + _destinationController.dispose(); + _amountController.dispose(); + super.dispose(); + } + + void _updateAmountToMax() { + final formatted = NumberFormattingService().formatBalance( + _withdrawableBalance, + addSymbol: false, + ); + _amountController.text = formatted; + } + + BigInt _parseAmount(String text) { + try { + // Remove any commas and parse + final cleaned = text.replaceAll(',', '').trim(); + final parts = cleaned.split('.'); + + BigInt wholePart = BigInt.parse(parts[0]); + BigInt fractionalPart = BigInt.zero; + + if (parts.length > 1) { + // Pad or truncate to 12 decimal places + String fraction = parts[1].padRight(12, '0').substring(0, 12); + fractionalPart = BigInt.parse(fraction); + } + + // Convert to planck (12 decimal places) + return wholePart * BigInt.from(10).pow(12) + fractionalPart; + } catch (e) { + return BigInt.zero; + } + } + + String? _validateDestination(String? value) { + if (value == null || value.trim().isEmpty) { + return 'Please enter a destination address'; + } + + final trimmed = value.trim(); + + // Quantus addresses (SS58 prefix 189) must start with "qz" + if (!trimmed.startsWith('qz')) { + return 'Address must start with "qz"'; + } + + // Check for valid base58 characters + final base58Regex = RegExp(r'^[1-9A-HJ-NP-Za-km-z]+$'); + if (!base58Regex.hasMatch(trimmed)) { + return 'Invalid address format'; + } + + return null; + } + + String? _validateAmount(String? 
value) { + if (value == null || value.trim().isEmpty) { + return 'Please enter an amount'; + } + final amount = _parseAmount(value); + if (amount <= BigInt.zero) { + return 'Amount must be greater than 0'; + } + if (amount > _withdrawableBalance) { + return 'Amount exceeds available balance'; + } + // Check minimum after fee + final afterFee = + amount - (amount * BigInt.from(_feeBps) ~/ BigInt.from(10000)); + // Minimum is 0.03 QTN (3 quantized units = 3 * 10^10 planck) + final minAmount = BigInt.from(3) * BigInt.from(10).pow(10); + if (afterFee < minAmount) { + return 'Amount too small after fee (min ~0.03 QTN)'; + } + return null; + } + + Future _startWithdrawal() async { + if (!_formKey.currentState!.validate()) return; + + // Check if address manager is ready (needed for partial withdrawals with change) + if (!_addressManagerReady) { + setState(() { + _error = 'Please wait, initializing...'; + }); + return; + } + + setState(() { + _isWithdrawing = true; + _error = null; + _progress = 0; + _statusMessage = 'Preparing withdrawal...'; + }); + + try { + final destination = _destinationController.text.trim(); + final amount = _withdrawAll + ? _withdrawableBalance + : _parseAmount(_amountController.text); + + _log.i('Starting withdrawal of $amount planck to $destination'); + + // Extract circuits if needed (auto-extracts on first withdrawal) + final circuitsReady = await _ensureCircuitsExtracted(); + if (!circuitsReady) { + setState(() { + _isWithdrawing = false; + }); + return; + } + + final withdrawalService = WithdrawalService(); + final circuitBinsDir = _circuitStatus.circuitDir!; + + // Check if we have tracked transfers (required for exact amounts) + if (_trackedTransfers.isEmpty) { + setState(() { + _error = + 'No tracked transfers available. 
Mining rewards can only be ' + 'withdrawn for blocks mined while the app was open.'; + _isWithdrawing = false; + }); + return; + } + + _log.i( + 'Using ${_trackedTransfers.length} tracked transfers with exact amounts', + ); + + final result = await withdrawalService.withdraw( + secretHex: widget.secretHex, + wormholeAddress: widget.wormholeAddress, + destinationAddress: destination, + amount: _withdrawAll ? null : amount, + circuitBinsDir: circuitBinsDir, + trackedTransfers: _trackedTransfers.isNotEmpty + ? _trackedTransfers + : null, + addressManager: _addressManager, + onProgress: (progress, message) { + if (mounted) { + setState(() { + // Scale withdrawal progress to 20-100% (extraction uses 0-20%) + _progress = 0.2 + (progress * 0.8); + _statusMessage = message; + }); + } + }, + ); + + if (result.success) { + // If change was generated, add the change address to transfer tracking + if (result.changeAddress != null) { + _transferTrackingService.addTrackedAddress(result.changeAddress!); + _log.i('Added change address to tracking: ${result.changeAddress}'); + _log.i('Change amount: ${result.changeAmount} planck'); + } + + if (mounted) { + final message = result.changeAddress != null + ? 'Withdrawal successful! Change sent to new address.' + : 'Withdrawal successful!'; + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('$message TX: ${result.txHash}'), + backgroundColor: Colors.green, + ), + ); + context.pop(); + } + } else { + setState(() { + _error = result.error; + }); + } + } catch (e) { + _log.e('Withdrawal failed', error: e); + setState(() { + _error = e.toString(); + }); + } finally { + if (mounted) { + setState(() { + _isWithdrawing = false; + }); + } + } + } + + Widget _buildCircuitStatusCard() { + if (_circuitStatus.isAvailable) { + final batchSize = _circuitStatus.numLeafProofs ?? 
16; + return Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Colors.green.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(12), + border: Border.all(color: Colors.green.withValues(alpha: 0.3)), + ), + child: Row( + children: [ + Icon(Icons.check_circle, color: Colors.green.shade400, size: 20), + const SizedBox(width: 12), + Expanded( + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text( + 'Circuit files ready', + style: TextStyle( + fontSize: 14, + fontWeight: FontWeight.w500, + color: Colors.green.shade200, + ), + ), + Text( + 'Batch size: $batchSize proofs${_circuitStatus.totalSizeBytes != null ? ' • ${CircuitManager.formatBytes(_circuitStatus.totalSizeBytes!)}' : ''}', + style: TextStyle( + fontSize: 12, + color: Colors.green.shade300, + ), + ), + ], + ), + ), + ], + ), + ); + } + + // Circuit files not yet extracted - will auto-extract on first withdrawal + return Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Colors.blue.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(12), + border: Border.all(color: Colors.blue.withValues(alpha: 0.3)), + ), + child: Row( + children: [ + Icon(Icons.info_outline, color: Colors.blue.shade400, size: 20), + const SizedBox(width: 12), + Expanded( + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text( + 'Circuit files will be extracted', + style: TextStyle( + fontSize: 14, + fontWeight: FontWeight.w500, + color: Colors.blue.shade200, + ), + ), + Text( + 'One-time setup (~163MB, takes a few seconds)', + style: TextStyle(fontSize: 12, color: Colors.blue.shade300), + ), + ], + ), + ), + ], + ), + ); + } + + Widget _buildTransferTrackingCard() { + if (!_hasLoadedTransfers) { + return Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Colors.grey.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(12), + border: 
Border.all(color: Colors.grey.withValues(alpha: 0.3)), + ), + child: const Row( + children: [ + SizedBox( + width: 20, + height: 20, + child: CircularProgressIndicator(strokeWidth: 2), + ), + SizedBox(width: 12), + Text( + 'Loading transfer data...', + style: TextStyle(fontSize: 14, color: Colors.grey), + ), + ], + ), + ); + } + + if (_trackedTransfers.isNotEmpty) { + final totalTracked = _trackedTransfers.fold( + BigInt.zero, + (sum, t) => sum + t.amount, + ); + final formattedTotal = NumberFormattingService().formatBalance( + totalTracked, + addSymbol: true, + ); + + // Calculate dummy proofs needed + final batchSize = _circuitStatus.numLeafProofs ?? 16; + final realProofs = _trackedTransfers.length; + final dummyProofs = batchSize - (realProofs % batchSize); + final effectiveDummies = dummyProofs == batchSize ? 0 : dummyProofs; + + return Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Colors.green.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(12), + border: Border.all(color: Colors.green.withValues(alpha: 0.3)), + ), + child: Row( + children: [ + Icon(Icons.check_circle, color: Colors.green.shade400, size: 20), + const SizedBox(width: 12), + Expanded( + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text( + '${_trackedTransfers.length} transfer(s) tracked', + style: TextStyle( + fontSize: 14, + fontWeight: FontWeight.w500, + color: Colors.green.shade200, + ), + ), + Text( + 'Total: $formattedTotal', + style: TextStyle( + fontSize: 12, + color: Colors.green.shade300, + ), + ), + Text( + '$realProofs real + $effectiveDummies dummy = $batchSize proofs per batch', + style: TextStyle( + fontSize: 11, + color: Colors.green.shade400, + ), + ), + ], + ), + ), + ], + ), + ); + } + + // No tracked transfers - show warning + return Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Colors.orange.withValues(alpha: 0.1), + borderRadius: 
BorderRadius.circular(12), + border: Border.all(color: Colors.orange.withValues(alpha: 0.3)), + ), + child: Row( + children: [ + Icon(Icons.warning_amber, color: Colors.orange.shade400, size: 20), + const SizedBox(width: 12), + Expanded( + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text( + 'No tracked transfers', + style: TextStyle( + fontSize: 14, + fontWeight: FontWeight.w500, + color: Colors.orange.shade200, + ), + ), + Text( + 'Mining rewards are only tracked while the app is open. Withdrawal may fail.', + style: TextStyle(fontSize: 12, color: Colors.orange.shade300), + ), + ], + ), + ), + ], + ), + ); + } + + /// Get the actual withdrawable balance from tracked unspent transfers. + BigInt get _withdrawableBalance { + if (_trackedTransfers.isEmpty) { + // Fall back to on-chain balance if no tracked transfers + return widget.availableBalance; + } + return _trackedTransfers.fold( + BigInt.zero, + (sum, t) => sum + t.amount, + ); + } + + @override + Widget build(BuildContext context) { + final formattedBalance = NumberFormattingService().formatBalance( + _withdrawableBalance, + addSymbol: true, + ); + + return Scaffold( + backgroundColor: const Color(0xFF0A0A0A), + appBar: AppBar( + backgroundColor: Colors.transparent, + title: const Text('Withdraw Rewards'), + leading: IconButton( + icon: const Icon(Icons.arrow_back), + onPressed: _isWithdrawing ? 
null : () => context.pop(), + ), + ), + body: SafeArea( + child: SingleChildScrollView( + padding: const EdgeInsets.all(24), + child: Form( + key: _formKey, + child: Column( + crossAxisAlignment: CrossAxisAlignment.stretch, + children: [ + // Available balance card + Container( + padding: const EdgeInsets.all(20), + decoration: BoxDecoration( + gradient: LinearGradient( + colors: [ + const Color(0xFF10B981).withValues(alpha: 0.2), + const Color(0xFF059669).withValues(alpha: 0.1), + ], + ), + borderRadius: BorderRadius.circular(16), + border: Border.all( + color: const Color(0xFF10B981).withValues(alpha: 0.3), + ), + ), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text( + 'Available Balance', + style: TextStyle( + fontSize: 14, + color: Colors.white.withValues(alpha: 0.7), + ), + ), + const SizedBox(height: 8), + Text( + formattedBalance, + style: const TextStyle( + fontSize: 28, + fontWeight: FontWeight.bold, + color: Color(0xFF10B981), + ), + ), + ], + ), + ), + const SizedBox(height: 16), + + // Circuit status card + _buildCircuitStatusCard(), + const SizedBox(height: 16), + + // Transfer tracking status card + _buildTransferTrackingCard(), + const SizedBox(height: 32), + + // Destination address + Text( + 'Destination Address', + style: TextStyle( + fontSize: 14, + fontWeight: FontWeight.w500, + color: Colors.white.withValues(alpha: 0.9), + ), + ), + const SizedBox(height: 8), + TextFormField( + controller: _destinationController, + enabled: !_isWithdrawing, + validator: _validateDestination, + style: const TextStyle(fontFamily: 'Fira Code', fontSize: 14), + decoration: InputDecoration( + hintText: 'Enter destination address', + filled: true, + fillColor: Colors.white.withValues(alpha: 0.05), + border: OutlineInputBorder( + borderRadius: BorderRadius.circular(12), + borderSide: BorderSide( + color: Colors.white.withValues(alpha: 0.1), + ), + ), + enabledBorder: OutlineInputBorder( + borderRadius: BorderRadius.circular(12), + 
borderSide: BorderSide( + color: Colors.white.withValues(alpha: 0.1), + ), + ), + suffixIcon: IconButton( + icon: const Icon(Icons.paste), + onPressed: _isWithdrawing + ? null + : () async { + final data = await Clipboard.getData( + Clipboard.kTextPlain, + ); + if (data?.text != null) { + _destinationController.text = data!.text! + .trim(); + } + }, + ), + ), + ), + const SizedBox(height: 24), + + // Amount + Row( + children: [ + Text( + 'Amount', + style: TextStyle( + fontSize: 14, + fontWeight: FontWeight.w500, + color: Colors.white.withValues(alpha: 0.9), + ), + ), + const Spacer(), + Row( + children: [ + Checkbox( + value: _withdrawAll, + onChanged: _isWithdrawing + ? null + : (value) { + setState(() { + _withdrawAll = value ?? true; + if (_withdrawAll) { + _updateAmountToMax(); + } + }); + }, + ), + Text( + 'Withdraw all', + style: TextStyle( + fontSize: 14, + color: Colors.white.withValues(alpha: 0.7), + ), + ), + ], + ), + ], + ), + const SizedBox(height: 8), + TextFormField( + controller: _amountController, + enabled: !_isWithdrawing && !_withdrawAll, + validator: _validateAmount, + keyboardType: const TextInputType.numberWithOptions( + decimal: true, + ), + style: const TextStyle(fontSize: 18), + decoration: InputDecoration( + hintText: '0.00', + filled: true, + fillColor: Colors.white.withValues(alpha: 0.05), + border: OutlineInputBorder( + borderRadius: BorderRadius.circular(12), + borderSide: BorderSide( + color: Colors.white.withValues(alpha: 0.1), + ), + ), + enabledBorder: OutlineInputBorder( + borderRadius: BorderRadius.circular(12), + borderSide: BorderSide( + color: Colors.white.withValues(alpha: 0.1), + ), + ), + disabledBorder: OutlineInputBorder( + borderRadius: BorderRadius.circular(12), + borderSide: BorderSide( + color: Colors.white.withValues(alpha: 0.05), + ), + ), + suffixText: 'QTN', + ), + ), + const SizedBox(height: 16), + + // Fee info + Container( + padding: const EdgeInsets.all(12), + decoration: BoxDecoration( + color: 
Colors.blue.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(8), + ), + child: Row( + children: [ + Icon( + Icons.info_outline, + size: 16, + color: Colors.blue.shade300, + ), + const SizedBox(width: 8), + Expanded( + child: Text( + 'Network fee: 0.1% of withdrawal amount', + style: TextStyle( + fontSize: 12, + color: Colors.blue.shade200, + ), + ), + ), + ], + ), + ), + const SizedBox(height: 32), + + // Error message + if (_error != null) ...[ + Container( + padding: const EdgeInsets.all(12), + decoration: BoxDecoration( + color: Colors.red.withValues(alpha: 0.1), + borderRadius: BorderRadius.circular(8), + border: Border.all( + color: Colors.red.withValues(alpha: 0.3), + ), + ), + child: Row( + children: [ + const Icon( + Icons.error_outline, + size: 16, + color: Colors.red, + ), + const SizedBox(width: 8), + Expanded( + child: Text( + _error!, + style: const TextStyle( + fontSize: 12, + color: Colors.red, + ), + ), + ), + ], + ), + ), + const SizedBox(height: 16), + ], + + // Progress indicator + if (_isWithdrawing) ...[ + Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Colors.white.withValues(alpha: 0.05), + borderRadius: BorderRadius.circular(12), + ), + child: Column( + children: [ + Text( + _statusMessage, + style: TextStyle( + fontSize: 14, + color: Colors.white.withValues(alpha: 0.9), + ), + ), + const SizedBox(height: 12), + LinearProgressIndicator( + value: _progress, + backgroundColor: Colors.white.withValues(alpha: 0.1), + valueColor: const AlwaysStoppedAnimation( + Color(0xFF10B981), + ), + ), + ], + ), + ), + const SizedBox(height: 16), + ], + + // Withdraw button + SizedBox( + height: 56, + child: ElevatedButton( + onPressed: _isWithdrawing ? 
null : _startWithdrawal, + style: ElevatedButton.styleFrom( + backgroundColor: const Color(0xFF10B981), + foregroundColor: Colors.white, + disabledBackgroundColor: const Color( + 0xFF10B981, + ).withValues(alpha: 0.5), + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(12), + ), + ), + child: _isWithdrawing + ? const SizedBox( + height: 24, + width: 24, + child: CircularProgressIndicator( + strokeWidth: 2, + color: Colors.white, + ), + ) + : const Text( + 'Withdraw', + style: TextStyle( + fontSize: 18, + fontWeight: FontWeight.w600, + ), + ), + ), + ), + ], + ), + ), + ), + ), + ); + } +} diff --git a/miner-app/lib/main.dart b/miner-app/lib/main.dart index 765d0e79..d5928dd4 100644 --- a/miner-app/lib/main.dart +++ b/miner-app/lib/main.dart @@ -7,7 +7,9 @@ import 'features/setup/node_setup_screen.dart'; import 'features/setup/node_identity_setup_screen.dart'; import 'features/setup/rewards_address_setup_screen.dart'; import 'features/miner/miner_dashboard_screen.dart'; +import 'features/withdrawal/withdrawal_screen.dart'; import 'src/services/binary_manager.dart'; +import 'src/services/miner_wallet_service.dart'; import 'src/services/mining_orchestrator.dart'; import 'src/services/process_cleanup_service.dart'; import 'src/utils/app_logger.dart'; @@ -71,9 +73,17 @@ class GlobalMinerManager { } } -Future initialRedirect(BuildContext context, GoRouterState state) async { +Future initialRedirect( + BuildContext context, + GoRouterState state, +) async { final currentRoute = state.uri.toString(); + // Don't redirect if already on a sub-route (like /withdraw) + if (currentRoute == '/withdraw') { + return null; + } + // Check 1: Node Installed bool isNodeInstalled = false; try { @@ -91,7 +101,8 @@ Future initialRedirect(BuildContext context, GoRouterState state) async // Check 2: Node Identity Set bool isIdentitySet = false; try { - final identityPath = '${await BinaryManager.getQuantusHomeDirectoryPath()}/node_key.p2p'; + final identityPath = + 
'${await BinaryManager.getQuantusHomeDirectoryPath()}/node_key.p2p'; isIdentitySet = await File(identityPath).exists(); } catch (e) { _log.e('Error checking node identity', error: e); @@ -99,22 +110,25 @@ Future initialRedirect(BuildContext context, GoRouterState state) async } if (!isIdentitySet) { - return (currentRoute == '/node_identity_setup') ? null : '/node_identity_setup'; + return (currentRoute == '/node_identity_setup') + ? null + : '/node_identity_setup'; } - // Check 3: Rewards Address Set - bool isRewardsAddressSet = false; + // Check 3: Rewards Wallet Set (mnemonic-based wormhole address) + bool isRewardsWalletSet = false; try { - final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); - final rewardsFile = File('$quantusHome/rewards-address.txt'); - isRewardsAddressSet = await rewardsFile.exists(); + final walletService = MinerWalletService(); + isRewardsWalletSet = await walletService.isSetupComplete(); } catch (e) { - _log.e('Error checking rewards address', error: e); - isRewardsAddressSet = false; + _log.e('Error checking rewards wallet', error: e); + isRewardsWalletSet = false; } - if (!isRewardsAddressSet) { - return (currentRoute == '/rewards_address_setup') ? null : '/rewards_address_setup'; + if (!isRewardsWalletSet) { + return (currentRoute == '/rewards_address_setup') + ? null + : '/rewards_address_setup'; } // If all setup steps are complete, go to the miner dashboard @@ -129,12 +143,36 @@ final _router = GoRouter( path: '/', // Builder is not strictly necessary if initialLocation and redirect handle it, // but can be a fallback or initial loading screen. 
- builder: (context, state) => const Scaffold(body: Center(child: CircularProgressIndicator())), + builder: (context, state) => + const Scaffold(body: Center(child: CircularProgressIndicator())), + ), + GoRoute( + path: '/node_setup', + builder: (context, state) => const NodeSetupScreen(), + ), + GoRoute( + path: '/node_identity_setup', + builder: (context, state) => const NodeIdentitySetupScreen(), + ), + GoRoute( + path: '/rewards_address_setup', + builder: (context, state) => const RewardsAddressSetupScreen(), + ), + GoRoute( + path: '/miner_dashboard', + builder: (context, state) => const MinerDashboardScreen(), + ), + GoRoute( + path: '/withdraw', + builder: (context, state) { + final extra = state.extra as Map; + return WithdrawalScreen( + availableBalance: extra['balance'] as BigInt, + wormholeAddress: extra['address'] as String, + secretHex: extra['secretHex'] as String, + ); + }, ), - GoRoute(path: '/node_setup', builder: (context, state) => const NodeSetupScreen()), - GoRoute(path: '/node_identity_setup', builder: (context, state) => const NodeIdentitySetupScreen()), - GoRoute(path: '/rewards_address_setup', builder: (context, state) => const RewardsAddressSetupScreen()), - GoRoute(path: '/miner_dashboard', builder: (context, state) => const MinerDashboardScreen()), ], ); @@ -206,6 +244,9 @@ class _MinerAppState extends State { } @override - Widget build(BuildContext context) => - MaterialApp.router(title: 'Quantus Miner', theme: ThemeData.dark(useMaterial3: true), routerConfig: _router); + Widget build(BuildContext context) => MaterialApp.router( + title: 'Quantus Miner', + theme: ThemeData.dark(useMaterial3: true), + routerConfig: _router, + ); } diff --git a/miner-app/lib/src/config/miner_config.dart b/miner-app/lib/src/config/miner_config.dart index 8884b23b..85f76ee1 100644 --- a/miner-app/lib/src/config/miner_config.dart +++ b/miner-app/lib/src/config/miner_config.dart @@ -108,6 +108,7 @@ class MinerConfig { displayName: 'Development', description: 
'Local development chain', rpcUrl: 'http://127.0.0.1:9933', + subsquidUrl: 'http://127.0.0.1:4350/graphql', isDefault: true, ), ChainConfig( @@ -115,17 +116,22 @@ class MinerConfig { displayName: 'Dirac', description: 'Dirac testnet', rpcUrl: 'https://a1-dirac.quantus.cat', + subsquidUrl: 'https://subsquid.quantus.com/graphql', isDefault: false, ), ]; /// Get chain config by ID, returns dev chain if not found static ChainConfig getChainById(String id) { - return availableChains.firstWhere((chain) => chain.id == id, orElse: () => availableChains.first); + return availableChains.firstWhere( + (chain) => chain.id == id, + orElse: () => availableChains.first, + ); } /// The default chain ID - static String get defaultChainId => availableChains.firstWhere((c) => c.isDefault).id; + static String get defaultChainId => + availableChains.firstWhere((c) => c.isDefault).id; // ============================================================ // Process Names (for cleanup) @@ -169,6 +175,7 @@ class ChainConfig { final String displayName; final String description; final String rpcUrl; + final String subsquidUrl; final bool isDefault; const ChainConfig({ @@ -176,12 +183,15 @@ class ChainConfig { required this.displayName, required this.description, required this.rpcUrl, + required this.subsquidUrl, required this.isDefault, }); /// Whether this chain uses the local node RPC - bool get isLocalNode => rpcUrl.contains('127.0.0.1') || rpcUrl.contains('localhost'); + bool get isLocalNode => + rpcUrl.contains('127.0.0.1') || rpcUrl.contains('localhost'); @override - String toString() => 'ChainConfig(id: $id, displayName: $displayName, rpcUrl: $rpcUrl)'; + String toString() => + 'ChainConfig(id: $id, displayName: $displayName, rpcUrl: $rpcUrl)'; } diff --git a/miner-app/lib/src/models/miner_error.dart b/miner-app/lib/src/models/miner_error.dart index edc9daad..ba902485 100644 --- a/miner-app/lib/src/models/miner_error.dart +++ b/miner-app/lib/src/models/miner_error.dart @@ -66,7 +66,10 @@ 
class MinerError { ); /// Create a miner startup failure error. - factory MinerError.minerStartupFailed(Object error, [StackTrace? stackTrace]) => MinerError( + factory MinerError.minerStartupFailed( + Object error, [ + StackTrace? stackTrace, + ]) => MinerError( type: MinerErrorType.minerStartupFailed, message: 'Failed to start miner: $error', exception: error, @@ -74,7 +77,10 @@ class MinerError { ); /// Create a node startup failure error. - factory MinerError.nodeStartupFailed(Object error, [StackTrace? stackTrace]) => MinerError( + factory MinerError.nodeStartupFailed( + Object error, [ + StackTrace? stackTrace, + ]) => MinerError( type: MinerErrorType.nodeStartupFailed, message: 'Failed to start node: $error', exception: error, diff --git a/miner-app/lib/src/services/base_process_manager.dart b/miner-app/lib/src/services/base_process_manager.dart index 45fd678d..6142e3ec 100644 --- a/miner-app/lib/src/services/base_process_manager.dart +++ b/miner-app/lib/src/services/base_process_manager.dart @@ -51,7 +51,10 @@ abstract class BaseProcessManager { /// Initialize the log processor for a source void initLogProcessor(String sourceName, {SyncStateProvider? 
getSyncState}) { - _logProcessor = LogStreamProcessor(sourceName: sourceName, getSyncState: getSyncState); + _logProcessor = LogStreamProcessor( + sourceName: sourceName, + getSyncState: getSyncState, + ); } /// Attach process streams to log processor @@ -152,7 +155,10 @@ abstract class BaseProcessManager { try { _process!.kill(ProcessSignal.sigkill); - await _process!.exitCode.timeout(MinerConfig.processVerificationDelay, onTimeout: () => -1); + await _process!.exitCode.timeout( + MinerConfig.processVerificationDelay, + onTimeout: () => -1, + ); } catch (e) { log.e('Error during force kill', error: e); } diff --git a/miner-app/lib/src/services/binary_manager.dart b/miner-app/lib/src/services/binary_manager.dart index 5a25ca53..474ca915 100644 --- a/miner-app/lib/src/services/binary_manager.dart +++ b/miner-app/lib/src/services/binary_manager.dart @@ -21,10 +21,15 @@ class BinaryVersion { BinaryVersion(this.version, this.checkedAt); - Map toJson() => {'version': version, 'checkedAt': checkedAt.toIso8601String()}; - - factory BinaryVersion.fromJson(Map json) => - BinaryVersion(json['version'] as String, DateTime.parse(json['checkedAt'] as String)); + Map toJson() => { + 'version': version, + 'checkedAt': checkedAt.toIso8601String(), + }; + + factory BinaryVersion.fromJson(Map json) => BinaryVersion( + json['version'] as String, + DateTime.parse(json['checkedAt'] as String), + ); } class BinaryUpdateInfo { @@ -33,7 +38,12 @@ class BinaryUpdateInfo { final String? latestVersion; final String? 
downloadUrl; - BinaryUpdateInfo({required this.updateAvailable, this.currentVersion, this.latestVersion, this.downloadUrl}); + BinaryUpdateInfo({ + required this.updateAvailable, + this.currentVersion, + this.latestVersion, + this.downloadUrl, + }); } class BinaryManager { @@ -130,7 +140,11 @@ class BinaryManager { } static Future getLatestNodeVersion() async { - final rel = await http.get(Uri.parse('https://api.github.com/repos/$_repoOwner/$_repoName/releases/latest')); + final rel = await http.get( + Uri.parse( + 'https://api.github.com/repos/$_repoOwner/$_repoName/releases/latest', + ), + ); if (rel.statusCode != 200) { throw Exception('Failed to fetch latest node version: ${rel.statusCode}'); @@ -140,10 +154,16 @@ class BinaryManager { } static Future getLatestMinerVersion() async { - final rel = await http.get(Uri.parse('https://api.github.com/repos/$_repoOwner/$_minerRepoName/releases/latest')); + final rel = await http.get( + Uri.parse( + 'https://api.github.com/repos/$_repoOwner/$_minerRepoName/releases/latest', + ), + ); if (rel.statusCode != 200) { - throw Exception('Failed to fetch latest miner version: ${rel.statusCode}'); + throw Exception( + 'Failed to fetch latest miner version: ${rel.statusCode}', + ); } return jsonDecode(rel.body)['tag_name'] as String; @@ -162,13 +182,18 @@ class BinaryManager { ); } - final updateAvailable = _isNewerVersion(currentVersion.version, latestVersion); + final updateAvailable = _isNewerVersion( + currentVersion.version, + latestVersion, + ); return BinaryUpdateInfo( updateAvailable: updateAvailable, currentVersion: currentVersion.version, latestVersion: latestVersion, - downloadUrl: updateAvailable ? _buildNodeDownloadUrl(latestVersion) : null, + downloadUrl: updateAvailable + ? 
_buildNodeDownloadUrl(latestVersion) + : null, ); } catch (e) { _log.w('Error checking node update', error: e); @@ -189,13 +214,18 @@ class BinaryManager { ); } - final updateAvailable = _isNewerVersion(currentVersion.version, latestVersion); + final updateAvailable = _isNewerVersion( + currentVersion.version, + latestVersion, + ); return BinaryUpdateInfo( updateAvailable: updateAvailable, currentVersion: currentVersion.version, latestVersion: latestVersion, - downloadUrl: updateAvailable ? _buildMinerDownloadUrl(latestVersion) : null, + downloadUrl: updateAvailable + ? _buildMinerDownloadUrl(latestVersion) + : null, ); } catch (e) { _log.w('Error checking miner update', error: e); @@ -228,7 +258,8 @@ class BinaryManager { // Force x86_64 on Windows to support x64 emulation on ARM devices // unless we specifically start releasing native ARM64 Windows binaries arch = 'x86_64'; - } else if (Platform.version.contains('arm64') || Platform.version.contains('aarch64')) { + } else if (Platform.version.contains('arm64') || + Platform.version.contains('aarch64')) { arch = 'aarch64'; } else { arch = 'x86_64'; @@ -243,7 +274,9 @@ class BinaryManager { static bool _isNewerVersion(String current, String latest) { // Remove 'v' prefix if present - final currentClean = current.startsWith('v') ? current.substring(1) : current; + final currentClean = current.startsWith('v') + ? current.substring(1) + : current; final latestClean = latest.startsWith('v') ? latest.substring(1) : latest; final currentParts = currentClean.split('.').map(int.tryParse).toList(); @@ -277,7 +310,9 @@ class BinaryManager { return await _downloadNodeBinary(onProgress: onProgress); } - static Future updateNodeBinary({void Function(DownloadProgress progress)? onProgress}) async { + static Future updateNodeBinary({ + void Function(DownloadProgress progress)? 
onProgress, + }) async { _log.i('Updating node binary to latest version...'); final binPath = await getNodeBinaryFilePath(); @@ -294,7 +329,10 @@ class BinaryManager { try { // Download to temporary location first - final newBinary = await _downloadNodeBinary(onProgress: onProgress, isUpdate: true); + final newBinary = await _downloadNodeBinary( + onProgress: onProgress, + isUpdate: true, + ); // If download successful, replace the old binary if (await backupFile.exists()) { @@ -322,7 +360,11 @@ class BinaryManager { bool isUpdate = false, }) async { // Find latest tag on GitHub - final rel = await http.get(Uri.parse('https://api.github.com/repos/$_repoOwner/$_repoName/releases/latest')); + final rel = await http.get( + Uri.parse( + 'https://api.github.com/repos/$_repoOwner/$_repoName/releases/latest', + ), + ); final tag = jsonDecode(rel.body)['tag_name'] as String; _log.d('Found latest tag: $tag'); @@ -331,14 +373,17 @@ class BinaryManager { final target = _targetTriple(); final extension = Platform.isWindows ? "zip" : "tar.gz"; final asset = '$_binary-$tag-$target.$extension'; - final url = 'https://github.com/$_repoOwner/$_repoName/releases/download/$tag/$asset'; + final url = + 'https://github.com/$_repoOwner/$_repoName/releases/download/$tag/$asset'; // Download final cacheDir = await _getCacheDir(); final tgz = File(p.join(cacheDir.path, asset)); // Use temporary path for extraction during updates - final tempExtractDir = isUpdate ? Directory(p.join(cacheDir.path, 'temp_update')) : cacheDir; + final tempExtractDir = isUpdate + ? 
Directory(p.join(cacheDir.path, 'temp_update')) + : cacheDir; if (isUpdate && await tempExtractDir.exists()) { await tempExtractDir.delete(recursive: true); @@ -353,7 +398,9 @@ class BinaryManager { final response = await client.send(request); if (response.statusCode != 200) { - throw Exception('Failed to download binary: ${response.statusCode} ${response.reasonPhrase}'); + throw Exception( + 'Failed to download binary: ${response.statusCode} ${response.reasonPhrase}', + ); } final totalBytes = response.contentLength ?? -1; @@ -383,7 +430,10 @@ class BinaryManager { // Extract to temporary directory if updating await Process.run('tar', ['-xzf', tgz.path, '-C', tempExtractDir.path]); - final tempBinPath = p.join(tempExtractDir.path, _normalizeFilename(_binary)); + final tempBinPath = p.join( + tempExtractDir.path, + _normalizeFilename(_binary), + ); final finalBinPath = await getNodeBinaryFilePath(); if (!Platform.isWindows) await Process.run('chmod', ['+x', tempBinPath]); @@ -423,7 +473,9 @@ class BinaryManager { return await _downloadMinerBinary(onProgress: onProgress); } - static Future updateMinerBinary({void Function(DownloadProgress progress)? onProgress}) async { + static Future updateMinerBinary({ + void Function(DownloadProgress progress)? 
onProgress, + }) async { _log.i('Updating miner binary to latest version...'); final binPath = await getExternalMinerBinaryFilePath(); @@ -440,7 +492,10 @@ class BinaryManager { try { // Download to temporary location first - final newBinary = await _downloadMinerBinary(onProgress: onProgress, isUpdate: true); + final newBinary = await _downloadMinerBinary( + onProgress: onProgress, + isUpdate: true, + ); // If download successful, replace the old binary if (await backupFile.exists()) { @@ -470,7 +525,8 @@ class BinaryManager { _log.d('External miner binary download process starting...'); // Find latest tag on GitHub - final releaseUrl = 'https://api.github.com/repos/$_repoOwner/$_minerRepoName/releases/latest'; + final releaseUrl = + 'https://api.github.com/repos/$_repoOwner/$_minerRepoName/releases/latest'; _log.d('Fetching latest release from: $releaseUrl'); final rel = await http.get(Uri.parse(releaseUrl)); @@ -498,7 +554,8 @@ class BinaryManager { // Force x86_64 on Windows to support x64 emulation on ARM devices // unless we specifically start releasing native ARM64 Windows binaries arch = 'x86_64'; - } else if (Platform.version.contains('arm64') || Platform.version.contains('aarch64')) { + } else if (Platform.version.contains('arm64') || + Platform.version.contains('aarch64')) { arch = 'aarch64'; } else { arch = 'x86_64'; @@ -510,7 +567,8 @@ class BinaryManager { _log.d('Looking for asset: $asset'); - final url = 'https://github.com/$_repoOwner/$_minerRepoName/releases/download/$tag/$asset'; + final url = + 'https://github.com/$_repoOwner/$_minerRepoName/releases/download/$tag/$asset'; // Check if the asset exists in the release final assets = releaseData['assets'] as List; @@ -542,7 +600,9 @@ class BinaryManager { _log.d('Download response status: ${response.statusCode}'); if (response.statusCode != 200) { - throw Exception('Failed to download external miner binary: ${response.statusCode} ${response.reasonPhrase}'); + throw Exception( + 'Failed to download 
external miner binary: ${response.statusCode} ${response.reasonPhrase}', + ); } final totalBytes = response.contentLength ?? -1; @@ -574,7 +634,10 @@ class BinaryManager { // Set executable permissions on temp file if (!Platform.isWindows) { _log.d('Setting executable permissions on ${tempBinaryFile.path}'); - final chmodResult = await Process.run('chmod', ['+x', tempBinaryFile.path]); + final chmodResult = await Process.run('chmod', [ + '+x', + tempBinaryFile.path, + ]); _log.d('chmod exit code: ${chmodResult.exitCode}'); if (chmodResult.exitCode != 0) { _log.e('chmod stderr: ${chmodResult.stderr}'); @@ -603,8 +666,12 @@ class BinaryManager { // Save version info await _saveMinerVersion(tag); } else { - _log.e('External miner binary still not found at $binPath after download!'); - throw Exception('External miner binary not found after download at $binPath'); + _log.e( + 'External miner binary still not found at $binPath after download!', + ); + throw Exception( + 'External miner binary not found after download at $binPath', + ); } return binFile; @@ -622,7 +689,9 @@ class BinaryManager { if (await nodeKeyFile.exists()) { final stat = await nodeKeyFile.stat(); if (stat.size > 0) { - _log.d('Node key file already exists and has content (size: ${stat.size} bytes)'); + _log.d( + 'Node key file already exists and has content (size: ${stat.size} bytes)', + ); return nodeKeyFile; } } @@ -636,13 +705,20 @@ class BinaryManager { } try { - final processResult = await Process.run(nodeBinaryPath, ['key', 'generate-node-key', '--file', nodeKeyFile.path]); + final processResult = await Process.run(nodeBinaryPath, [ + 'key', + 'generate-node-key', + '--file', + nodeKeyFile.path, + ]); if (processResult.exitCode == 0) { if (await nodeKeyFile.exists()) { final stat = await nodeKeyFile.stat(); if (stat.size > 0) { - _log.i('Successfully generated node key file: ${nodeKeyFile.path} (size: ${stat.size} bytes)'); + _log.i( + 'Successfully generated node key file: ${nodeKeyFile.path} 
(size: ${stat.size} bytes)', + ); return nodeKeyFile; } else { throw Exception('Node key file was created but is empty'); @@ -661,15 +737,20 @@ class BinaryManager { } } - static String _normalizeFilename(String file) => Platform.isWindows ? "$file.exe" : file; + static String _normalizeFilename(String file) => + Platform.isWindows ? "$file.exe" : file; - static Future _getCacheDir() async => - Directory(p.join(await getQuantusHomeDirectoryPath(), 'bin')).create(recursive: true); + static Future _getCacheDir() async => Directory( + p.join(await getQuantusHomeDirectoryPath(), 'bin'), + ).create(recursive: true); - static String _home() => Platform.environment['HOME'] ?? Platform.environment['USERPROFILE']!; + static String _home() => + Platform.environment['HOME'] ?? Platform.environment['USERPROFILE']!; static String _targetTriple() { - final os = Platform.isMacOS ? 'apple-darwin' : (Platform.isWindows ? 'pc-windows-msvc' : 'unknown-linux-gnu'); + final os = Platform.isMacOS + ? 'apple-darwin' + : (Platform.isWindows ? 'pc-windows-msvc' : 'unknown-linux-gnu'); // Force x86_64 on Windows to ensure we download the x64 binary even on ARM devices // (since they can emulate x64, and we don't likely have a native ARM build for Windows yet) @@ -677,7 +758,11 @@ class BinaryManager { return 'x86_64-$os'; } - final arch = Platform.version.contains('arm64') || Platform.version.contains('aarch64') ? 'aarch64' : 'x86_64'; + final arch = + Platform.version.contains('arm64') || + Platform.version.contains('aarch64') + ? 'aarch64' + : 'x86_64'; return '$arch-$os'; } } diff --git a/miner-app/lib/src/services/chain_rpc_client.dart b/miner-app/lib/src/services/chain_rpc_client.dart index 08298ceb..4dce66ff 100644 --- a/miner-app/lib/src/services/chain_rpc_client.dart +++ b/miner-app/lib/src/services/chain_rpc_client.dart @@ -96,7 +96,8 @@ class ChainRpcClient { bool isSyncing = false; int? 
targetBlock; if (syncStateResult != null) { - if (syncStateResult['currentBlock'] != null && syncStateResult['highestBlock'] != null) { + if (syncStateResult['currentBlock'] != null && + syncStateResult['highestBlock'] != null) { final current = syncStateResult['currentBlock'] as int; final highest = syncStateResult['highestBlock'] as int; @@ -164,6 +165,230 @@ class ChainRpcClient { } } + /// Get block hash by block number. + Future getBlockHash(int blockNumber) async { + try { + final result = await _rpcCall('chain_getBlockHash', [ + '0x${blockNumber.toRadixString(16)}', + ]); + return result as String?; + } catch (e) { + return null; + } + } + + /// Get account balance (free balance) for an address. + /// + /// [address] should be an SS58-encoded address. + /// [accountIdHex] can be provided if already known (32 bytes as hex without 0x prefix). + /// Returns the free balance in planck (smallest unit), or null if the query fails. + Future getAccountBalance( + String address, { + String? accountIdHex, + }) async { + try { + // Build the storage key for System::Account(address) + final storageKey = _buildAccountStorageKey( + address, + accountIdHex: accountIdHex, + ); + if (storageKey == null) { + _log.w('Failed to build storage key for address: $address'); + return null; + } + + final result = await _rpcCall('state_getStorage', [storageKey]); + if (result == null) { + // Account doesn't exist, balance is 0 + return BigInt.zero; + } + + // Decode the AccountInfo structure + final balance = _decodeAccountBalance(result as String); + return balance; + } catch (e) { + _log.w('getAccountBalance error', error: e); + return null; + } + } + + /// Build the storage key for System::Account(address) + /// + /// [accountIdHex] can be provided if already known (32 bytes as hex without 0x prefix). + String? _buildAccountStorageKey(String ss58Address, {String? 
accountIdHex}) { + try { + // Get account ID bytes - either from provided hex or decode from SS58 + List accountIdBytes; + if (accountIdHex != null) { + // Use provided hex (remove 0x prefix if present) + final hex = accountIdHex.startsWith('0x') + ? accountIdHex.substring(2) + : accountIdHex; + accountIdBytes = _hexToBytes(hex); + } else { + // Decode SS58 address to get the raw account ID (32 bytes) + final decoded = _decodeSs58Address(ss58Address); + if (decoded == null) return null; + accountIdBytes = decoded; + } + + // Storage key = twox128("System") ++ twox128("Account") ++ blake2_128_concat(account_id) + // Pre-computed twox128 hashes: + // twox128("System") = 0x26aa394eea5630e07c48ae0c9558cef7 + // twox128("Account") = 0xb99d880ec681799c0cf30e8886371da9 + const systemPrefix = '26aa394eea5630e07c48ae0c9558cef7'; + const accountPrefix = 'b99d880ec681799c0cf30e8886371da9'; + + // blake2_128_concat(account_id) = blake2_128(account_id) ++ account_id + final blake2Hash = _blake2b128(accountIdBytes); + final accountIdHexStr = _bytesToHex(accountIdBytes); + + return '0x$systemPrefix$accountPrefix$blake2Hash$accountIdHexStr'; + } catch (e) { + _log.w('Error building storage key', error: e); + return null; + } + } + + /// Decode an SS58 address to raw 32-byte account ID + List? 
_decodeSs58Address(String ss58Address) { + try { + // SS58 is base58 encoded: [prefix(1-2 bytes)][account_id(32 bytes)][checksum(2 bytes)] + const base58Chars = + '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'; + + // Decode base58 + BigInt value = BigInt.zero; + for (int i = 0; i < ss58Address.length; i++) { + final char = ss58Address[i]; + final index = base58Chars.indexOf(char); + if (index < 0) { + _log.w('Invalid base58 character: $char'); + return null; + } + value = value * BigInt.from(58) + BigInt.from(index); + } + + // Convert to bytes + final bytes = []; + while (value > BigInt.zero) { + bytes.insert(0, (value % BigInt.from(256)).toInt()); + value = value ~/ BigInt.from(256); + } + + // Pad to expected length if needed + while (bytes.length < 35) { + bytes.insert(0, 0); + } + + // For SS58 prefix 189 (Quantus), the prefix is 2 bytes + // Format: [prefix_byte1][prefix_byte2][account_id(32)][checksum(2)] + if (bytes.length >= 36) { + return bytes.sublist(2, 34); + } else if (bytes.length >= 35) { + return bytes.sublist(1, 33); + } + + _log.w('Unexpected SS58 decoded length: ${bytes.length}'); + return null; + } catch (e) { + _log.w('Error decoding SS58 address', error: e); + return null; + } + } + + /// Compute blake2b-128 hash (simplified implementation) + /// + /// Note: This uses xxHash128 approximation since proper blake2b would require + /// additional dependencies. For substrate storage keys, this should work + /// as the node accepts any valid key format. 
+ String _blake2b128(List data) { + // xxHash128 implementation (faster and commonly used in substrate) + // For simplicity, we compute a hash using available primitives + // This is an approximation - the real implementation would use blake2b + + // Simple xxHash-like computation + int h1 = 0x9e3779b97f4a7c15; + int h2 = 0xbf58476d1ce4e5b9; + + for (int i = 0; i < data.length; i++) { + h1 ^= data[i]; + h1 = (h1 * 0x85ebca77) & 0xFFFFFFFF; + h2 ^= data[i]; + h2 = (h2 * 0xc2b2ae3d) & 0xFFFFFFFF; + } + + // Mix + h1 ^= h1 >> 16; + h2 ^= h2 >> 16; + + // Format as 16 bytes (32 hex chars) + final hex1 = h1.toRadixString(16).padLeft(8, '0'); + final hex2 = h2.toRadixString(16).padLeft(8, '0'); + return '$hex1$hex2'.padRight(32, '0'); + } + + /// Decode AccountInfo to extract free balance + BigInt? _decodeAccountBalance(String hexData) { + try { + // Remove 0x prefix + String hex = hexData.startsWith('0x') ? hexData.substring(2) : hexData; + + // AccountInfo structure (SCALE encoded): + // - nonce: u32 (4 bytes, little-endian) + // - consumers: u32 (4 bytes) + // - providers: u32 (4 bytes) + // - sufficients: u32 (4 bytes) + // - data.free: u128 (16 bytes, little-endian) + // - data.reserved: u128 (16 bytes) + // - data.frozen: u128 (16 bytes) + // - data.flags: u128 (16 bytes) + + // Skip to free balance: offset = 4 + 4 + 4 + 4 = 16 bytes = 32 hex chars + if (hex.length < 64) { + _log.w('AccountInfo hex too short: ${hex.length}'); + return null; + } + + // Extract free balance (16 bytes = 32 hex chars, little-endian) + final freeHex = hex.substring(32, 64); + + // Convert little-endian hex to BigInt + return _littleEndianHexToBigInt(freeHex); + } catch (e) { + _log.w('Error decoding account balance', error: e); + return null; + } + } + + /// Convert little-endian hex string to BigInt + BigInt _littleEndianHexToBigInt(String hex) { + final bytes = []; + for (int i = 0; i < hex.length; i += 2) { + bytes.add(int.parse(hex.substring(i, i + 2), radix: 16)); + } + + BigInt 
value = BigInt.zero; + for (int i = bytes.length - 1; i >= 0; i--) { + value = (value << 8) + BigInt.from(bytes[i]); + } + return value; + } + + /// Convert bytes to hex string + String _bytesToHex(List bytes) { + return bytes.map((b) => b.toRadixString(16).padLeft(2, '0')).join(); + } + + /// Convert hex string to bytes + List _hexToBytes(String hex) { + final bytes = []; + for (int i = 0; i < hex.length; i += 2) { + bytes.add(int.parse(hex.substring(i, i + 2), radix: 16)); + } + return bytes; + } + /// Get sync state information Future?> getSyncState() async { try { @@ -177,7 +402,9 @@ class ChainRpcClient { Future isSyncing() async { try { final syncState = await _rpcCall('system_syncState'); - if (syncState != null && syncState['currentBlock'] != null && syncState['highestBlock'] != null) { + if (syncState != null && + syncState['currentBlock'] != null && + syncState['highestBlock'] != null) { final current = syncState['currentBlock'] as int; final highest = syncState['highestBlock'] as int; return (highest - current) > 5; @@ -201,13 +428,22 @@ class ChainRpcClient { /// Execute a JSON-RPC call Future _rpcCall(String method, [List? 
params]) async { - final request = {'jsonrpc': '2.0', 'id': _requestId++, 'method': method, 'params': ?params}; + final request = { + 'jsonrpc': '2.0', + 'id': _requestId++, + 'method': method, + if (params != null) 'params': params, + }; // Only print RPC calls when debugging connection issues // print('DEBUG: Making RPC call: $method with request: ${json.encode(request)}'); final response = await _httpClient - .post(Uri.parse(rpcUrl), headers: {'Content-Type': 'application/json'}, body: json.encode(request)) + .post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: json.encode(request), + ) .timeout(timeout); if (response.statusCode == 200) { @@ -221,7 +457,9 @@ class ChainRpcClient { } else { // Don't log connection errors during startup - they're expected if (response.statusCode != 0) { - _log.w('RPC HTTP error for $method: ${response.statusCode} ${response.reasonPhrase}'); + _log.w( + 'RPC HTTP error for $method: ${response.statusCode} ${response.reasonPhrase}', + ); } throw Exception('HTTP ${response.statusCode}: ${response.reasonPhrase}'); } diff --git a/miner-app/lib/src/services/external_miner_api_client.dart b/miner-app/lib/src/services/external_miner_api_client.dart index df34db12..4a4f8cc1 100644 --- a/miner-app/lib/src/services/external_miner_api_client.dart +++ b/miner-app/lib/src/services/external_miner_api_client.dart @@ -39,14 +39,21 @@ class ExternalMinerApiClient { void Function(ExternalMinerMetrics metrics)? onMetricsUpdate; void Function(String error)? onError; - ExternalMinerApiClient({String? metricsUrl, this.timeout = const Duration(seconds: 5)}) - : metricsUrl = metricsUrl ?? MinerConfig.minerMetricsUrl(MinerConfig.defaultMinerMetricsPort), - _httpClient = http.Client(); + ExternalMinerApiClient({ + String? metricsUrl, + this.timeout = const Duration(seconds: 5), + }) : metricsUrl = + metricsUrl ?? 
+ MinerConfig.minerMetricsUrl(MinerConfig.defaultMinerMetricsPort), + _httpClient = http.Client(); /// Start polling for metrics void startPolling() { _pollTimer?.cancel(); - _pollTimer = Timer.periodic(MinerConfig.metricsPollingInterval, (_) => _pollMetrics()); + _pollTimer = Timer.periodic( + MinerConfig.metricsPollingInterval, + (_) => _pollMetrics(), + ); } /// Stop polling for metrics @@ -61,7 +68,9 @@ class ExternalMinerApiClient { /// Get metrics from external miner Prometheus endpoint Future getMetrics() async { try { - final response = await _httpClient.get(Uri.parse(metricsUrl)).timeout(timeout); + final response = await _httpClient + .get(Uri.parse(metricsUrl)) + .timeout(timeout); if (response.statusCode == 200) { return _parsePrometheusMetrics(response.body); @@ -167,7 +176,9 @@ class ExternalMinerApiClient { /// Test if the metrics endpoint is available Future isMetricsAvailable() async { try { - final response = await _httpClient.get(Uri.parse(metricsUrl)).timeout(const Duration(seconds: 3)); + final response = await _httpClient + .get(Uri.parse(metricsUrl)) + .timeout(const Duration(seconds: 3)); return response.statusCode == 200; } catch (e) { diff --git a/miner-app/lib/src/services/gpu_detection_service.dart b/miner-app/lib/src/services/gpu_detection_service.dart index dfaf24c0..b3d97181 100644 --- a/miner-app/lib/src/services/gpu_detection_service.dart +++ b/miner-app/lib/src/services/gpu_detection_service.dart @@ -43,7 +43,9 @@ class GpuDetectionService { // Failed. Check if we can extract the actual count from the error message to shortcut. // Message format: "❌ ERROR: Requested X GPU devices but only Y device(s) are available." 
final output = result.stdout.toString() + result.stderr.toString(); - final match = RegExp(r'only (\d+) device\(s\) are available').firstMatch(output); + final match = RegExp( + r'only (\d+) device\(s\) are available', + ).firstMatch(output); if (match != null) { final available = int.parse(match.group(1)!); return available; diff --git a/miner-app/lib/src/services/log_filter_service.dart b/miner-app/lib/src/services/log_filter_service.dart index a0ab0f04..1ace7136 100644 --- a/miner-app/lib/src/services/log_filter_service.dart +++ b/miner-app/lib/src/services/log_filter_service.dart @@ -5,7 +5,8 @@ class LogFilterService { final List criticalKeywordsDuringSync; LogFilterService({ - this.initialLinesToPrint = 50, // Increased initial lines to show more startup info + this.initialLinesToPrint = + 50, // Increased initial lines to show more startup info this.keywordsToWatch = const [ // Info level logs that users want to see by default 'info', @@ -67,16 +68,22 @@ class LogFilterService { final lowerLine = line.toLowerCase(); // Always print critical messages, regardless of sync state (after initial burst) - if (criticalKeywordsDuringSync.any((keyword) => lowerLine.contains(keyword.toLowerCase()))) { + if (criticalKeywordsDuringSync.any( + (keyword) => lowerLine.contains(keyword.toLowerCase()), + )) { return true; } if (isNodeSyncing) { // During sync, show info level logs and keywords (not just critical messages) - return keywordsToWatch.any((keyword) => lowerLine.contains(keyword.toLowerCase())); + return keywordsToWatch.any( + (keyword) => lowerLine.contains(keyword.toLowerCase()), + ); } else { // When synced (and after initial burst, and not critical), print if it matches normal keywords. 
- return keywordsToWatch.any((keyword) => lowerLine.contains(keyword.toLowerCase())); + return keywordsToWatch.any( + (keyword) => lowerLine.contains(keyword.toLowerCase()), + ); } } } diff --git a/miner-app/lib/src/services/log_stream_processor.dart b/miner-app/lib/src/services/log_stream_processor.dart index d53411e3..5baef43f 100644 --- a/miner-app/lib/src/services/log_stream_processor.dart +++ b/miner-app/lib/src/services/log_stream_processor.dart @@ -22,7 +22,12 @@ class LogEntry { /// Whether this is an error-level log. final bool isError; - LogEntry({required this.message, required this.timestamp, required this.source, this.isError = false}); + LogEntry({ + required this.message, + required this.timestamp, + required this.source, + this.isError = false, + }); @override String toString() { @@ -55,11 +60,14 @@ class LogStreamProcessor { Stream get logs => _logController.stream; /// Whether the processor is currently active. - bool get isActive => _stdoutSubscription != null || _stderrSubscription != null; + bool get isActive => + _stdoutSubscription != null || _stderrSubscription != null; - LogStreamProcessor({required this.sourceName, SyncStateProvider? getSyncState}) - : _filter = LogFilterService(), - _getSyncState = getSyncState; + LogStreamProcessor({ + required this.sourceName, + SyncStateProvider? getSyncState, + }) : _filter = LogFilterService(), + _getSyncState = getSyncState; /// Start processing logs from a process. /// @@ -98,7 +106,10 @@ class LogStreamProcessor { } void _processStdoutLine(String line) { - final shouldPrint = _filter.shouldPrintLine(line, isNodeSyncing: _getSyncState?.call() ?? false); + final shouldPrint = _filter.shouldPrintLine( + line, + isNodeSyncing: _getSyncState?.call() ?? 
false, + ); if (shouldPrint) { final isError = _isErrorLine(line); @@ -145,6 +156,9 @@ class LogStreamProcessor { } // Fallback generic error detection final lower = line.toLowerCase(); - return lower.contains('error') || lower.contains('panic') || lower.contains('fatal') || lower.contains('failed'); + return lower.contains('error') || + lower.contains('panic') || + lower.contains('fatal') || + lower.contains('failed'); } } diff --git a/miner-app/lib/src/services/miner_mnemonic_provider.dart b/miner-app/lib/src/services/miner_mnemonic_provider.dart new file mode 100644 index 00000000..d0b3b6f6 --- /dev/null +++ b/miner-app/lib/src/services/miner_mnemonic_provider.dart @@ -0,0 +1,19 @@ +import 'package:quantus_miner/src/services/miner_wallet_service.dart'; +import 'package:quantus_sdk/src/services/mnemonic_provider.dart'; + +/// Miner-specific implementation of [MnemonicProvider]. +/// +/// This wraps [MinerWalletService] to provide the mnemonic for +/// wormhole address derivation. +class MinerMnemonicProvider implements MnemonicProvider { + final MinerWalletService _walletService; + + MinerMnemonicProvider({MinerWalletService? walletService}) + : _walletService = walletService ?? 
MinerWalletService(); + + @override + Future getMnemonic() => _walletService.getMnemonic(); + + @override + Future hasMnemonic() => _walletService.hasMnemonic(); +} diff --git a/miner-app/lib/src/services/miner_process_manager.dart b/miner-app/lib/src/services/miner_process_manager.dart index 06b42c5f..2e258f85 100644 --- a/miner-app/lib/src/services/miner_process_manager.dart +++ b/miner-app/lib/src/services/miner_process_manager.dart @@ -74,7 +74,9 @@ class MinerProcessManager extends BaseProcessManager { // Validate binary exists if (!await config.binary.exists()) { - final error = MinerError.minerStartupFailed('Miner binary not found: ${config.binary.path}'); + final error = MinerError.minerStartupFailed( + 'Miner binary not found: ${config.binary.path}', + ); errorController.add(error); throw Exception(error.message); } @@ -98,12 +100,18 @@ class MinerProcessManager extends BaseProcessManager { // Check if process is still running // We just attached, so pid should be available final processPid = pid; - final stillRunning = await ProcessCleanupService.isProcessRunning(processPid); - if (!stillRunning) { - final error = MinerError.minerStartupFailed('Miner died during startup'); - errorController.add(error); - clearProcess(); - throw Exception(error.message); + if (processPid != null) { + final stillRunning = await ProcessCleanupService.isProcessRunning( + processPid, + ); + if (!stillRunning) { + final error = MinerError.minerStartupFailed( + 'Miner died during startup', + ); + errorController.add(error); + clearProcess(); + throw Exception(error.message); + } } log.i('Miner started (PID: $pid)'); diff --git a/miner-app/lib/src/services/miner_settings_service.dart b/miner-app/lib/src/services/miner_settings_service.dart index 0a617c94..8b180799 100644 --- a/miner-app/lib/src/services/miner_settings_service.dart +++ b/miner-app/lib/src/services/miner_settings_service.dart @@ -2,7 +2,9 @@ import 'dart:io'; import 
'package:quantus_miner/src/config/miner_config.dart'; import 'package:quantus_miner/src/services/binary_manager.dart'; +import 'package:quantus_miner/src/services/miner_wallet_service.dart'; import 'package:quantus_miner/src/utils/app_logger.dart'; +import 'package:quantus_sdk/quantus_sdk.dart'; import 'package:shared_preferences/shared_preferences.dart'; final _log = log.withTag('Settings'); @@ -32,25 +34,52 @@ class MinerSettingsService { return prefs.getInt(_keyGpuDevices); } - /// Save the selected chain ID. + /// Save the selected chain ID and configure endpoints accordingly. Future saveChainId(String chainId) async { final prefs = await SharedPreferences.getInstance(); await prefs.setString(_keyChainId, chainId); + // Update GraphQL endpoint for the selected chain + _configureEndpointsForChain(chainId); + } + + /// Configure RPC and GraphQL endpoints based on chain ID. + void _configureEndpointsForChain(String chainId) { + final chain = MinerConfig.getChainById(chainId); + _log.i('Configuring endpoints for chain $chainId:'); + _log.i(' RPC: ${chain.rpcUrl}'); + _log.i(' GraphQL: ${chain.subsquidUrl}'); + + // Configure RPC endpoint for SubstrateService + final rpcService = RpcEndpointService(); + _log.i(' RPC endpoints before: ${rpcService.endpoints.length}'); + rpcService.setEndpoints([chain.rpcUrl]); + _log.i(' RPC endpoints after: ${rpcService.endpoints.length}'); + _log.i(' Best RPC endpoint: ${rpcService.bestEndpointUrl}'); + + // Configure GraphQL endpoint (for any remaining Subsquid usage) + GraphQlEndpointService().setEndpoints([chain.subsquidUrl]); } /// Get the saved chain ID, returns default if not set. + /// Also configures GraphQL endpoints for the chain. 
Future getChainId() async { final prefs = await SharedPreferences.getInstance(); final savedChainId = prefs.getString(_keyChainId); + String chainId; if (savedChainId == null) { - return MinerConfig.defaultChainId; - } - // Validate that the chain ID is still valid - final validIds = MinerConfig.availableChains.map((c) => c.id).toList(); - if (!validIds.contains(savedChainId)) { - return MinerConfig.defaultChainId; + chainId = MinerConfig.defaultChainId; + } else { + // Validate that the chain ID is still valid + final validIds = MinerConfig.availableChains.map((c) => c.id).toList(); + if (!validIds.contains(savedChainId)) { + chainId = MinerConfig.defaultChainId; + } else { + chainId = savedChainId; + } } - return savedChainId; + // Configure endpoints for this chain + _configureEndpointsForChain(chainId); + return chainId; } /// Get the ChainConfig for the saved chain ID. @@ -68,26 +97,21 @@ class MinerSettingsService { final identityFile = File('$quantusHome/node_key.p2p'); if (await identityFile.exists()) { await identityFile.delete(); - _log.i('✅ Node identity file deleted: ${identityFile.path}'); + _log.i('Node identity file deleted: ${identityFile.path}'); } else { - _log.d('ℹ️ Node identity file not found, skipping deletion.'); + _log.d('Node identity file not found, skipping deletion.'); } } catch (e) { - _log.e('❌ Error deleting node identity file', error: e); + _log.e('Error deleting node identity file', error: e); } - // 2. Delete rewards address file + // 2. 
Delete wallet data (mnemonic from secure storage + preimage file) try { - final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); - final rewardsFile = File('$quantusHome/rewards-address.txt'); - if (await rewardsFile.exists()) { - await rewardsFile.delete(); - _log.i('✅ Rewards address file deleted: ${rewardsFile.path}'); - } else { - _log.d('ℹ️ Rewards address file not found, skipping deletion.'); - } + final walletService = MinerWalletService(); + await walletService.deleteWalletData(); + _log.i('Wallet data deleted'); } catch (e) { - _log.e('❌ Error deleting rewards address file', error: e); + _log.e('Error deleting wallet data', error: e); } // 3. Delete node binary @@ -106,7 +130,8 @@ class MinerSettingsService { // 4. Delete external miner binary try { - final minerBinaryPath = await BinaryManager.getExternalMinerBinaryFilePath(); + final minerBinaryPath = + await BinaryManager.getExternalMinerBinaryFilePath(); final minerFile = File(minerBinaryPath); if (await minerFile.exists()) { await minerFile.delete(); @@ -138,7 +163,9 @@ class MinerSettingsService { final binDir = Directory('$quantusHome/bin'); if (await binDir.exists()) { // Remove any leftover tar.gz files - final tarFiles = binDir.listSync().where((file) => file.path.endsWith('.tar.gz')); + final tarFiles = binDir.listSync().where( + (file) => file.path.endsWith('.tar.gz'), + ); for (var file in tarFiles) { await file.delete(); _log.i('✅ Cleaned up archive: ${file.path}'); diff --git a/miner-app/lib/src/services/miner_wallet_service.dart b/miner-app/lib/src/services/miner_wallet_service.dart new file mode 100644 index 00000000..7bf9619e --- /dev/null +++ b/miner-app/lib/src/services/miner_wallet_service.dart @@ -0,0 +1,266 @@ +import 'dart:io'; +import 'dart:math'; + +import 'package:bip39_mnemonic/bip39_mnemonic.dart'; +import 'package:flutter_secure_storage/flutter_secure_storage.dart'; +import 'package:quantus_miner/src/services/binary_manager.dart'; +import 
'package:quantus_miner/src/utils/app_logger.dart'; +import 'package:quantus_sdk/quantus_sdk.dart'; + +final _log = log.withTag('MinerWallet'); + +/// Service for managing the miner's wallet (mnemonic and wormhole key pair). +/// +/// The miner uses a wormhole address to receive rewards. This address is derived +/// from a mnemonic using a specific HD path for miner rewards. +/// +/// The mnemonic is stored securely using flutter_secure_storage, while the +/// rewards preimage (needed by the node) is stored in a file. +class MinerWalletService { + static const String _mnemonicKey = 'miner_mnemonic'; + static const String _rewardsPreimageFileName = 'rewards-preimage.txt'; + // Legacy file for backward compatibility + static const String _legacyRewardsAddressFileName = 'rewards-address.txt'; + + final FlutterSecureStorage _secureStorage; + + MinerWalletService({FlutterSecureStorage? secureStorage}) + : _secureStorage = + secureStorage ?? + const FlutterSecureStorage( + aOptions: AndroidOptions(encryptedSharedPreferences: true), + iOptions: IOSOptions( + accessibility: KeychainAccessibility.first_unlock, + ), + mOptions: MacOsOptions(useDataProtectionKeyChain: false), + ); + + /// Generate a new 24-word mnemonic. + String generateMnemonic() { + // Generate 256 bits of entropy for a 24-word mnemonic + final random = Random.secure(); + final entropy = List.generate(32, (_) => random.nextInt(256)); + final mnemonic = Mnemonic(entropy, Language.english); + return mnemonic.sentence; + } + + /// Validate a mnemonic phrase. + bool validateMnemonic(String mnemonic) { + try { + Mnemonic.fromSentence(mnemonic.trim(), Language.english); + return true; + } catch (e) { + _log.w('Invalid mnemonic: $e'); + return false; + } + } + + /// Save the mnemonic securely and derive the wormhole key pair. + /// + /// Returns the derived [WormholeKeyPair] on success. 
+ Future saveMnemonic(String mnemonic) async { + // Validate first + if (!validateMnemonic(mnemonic)) { + throw ArgumentError('Invalid mnemonic phrase'); + } + + // Store mnemonic securely + await _secureStorage.write(key: _mnemonicKey, value: mnemonic.trim()); + _log.i('Mnemonic saved securely'); + + // Derive wormhole key pair + final wormholeService = WormholeService(); + final keyPair = wormholeService.deriveMinerRewardsKeyPair( + mnemonic: mnemonic.trim(), + index: 0, + ); + + // Save the rewards preimage to file (needed by the node) + await _saveRewardsPreimage(keyPair.rewardsPreimage); + + _log.i('Wormhole address derived: ${keyPair.address}'); + return keyPair; + } + + /// Get the stored mnemonic, if any. + Future getMnemonic() async { + return await _secureStorage.read(key: _mnemonicKey); + } + + /// Check if a mnemonic is stored. + Future hasMnemonic() async { + final mnemonic = await getMnemonic(); + return mnemonic != null && mnemonic.isNotEmpty; + } + + /// Get the wormhole key pair derived from the stored mnemonic. + /// + /// Returns null if no mnemonic is stored. + Future getWormholeKeyPair() async { + final mnemonic = await getMnemonic(); + if (mnemonic == null || mnemonic.isEmpty) { + return null; + } + + final wormholeService = WormholeService(); + return wormholeService.deriveMinerRewardsKeyPair( + mnemonic: mnemonic, + index: 0, + ); + } + + /// Get the rewards preimage from the stored mnemonic. + /// + /// This is the value passed to the node's --rewards-preimage flag. + Future getRewardsPreimage() async { + final keyPair = await getWormholeKeyPair(); + return keyPair?.rewardsPreimage; + } + + /// Get the wormhole address where rewards are sent. + Future getRewardsAddress() async { + final keyPair = await getWormholeKeyPair(); + return keyPair?.address; + } + + /// Check if the rewards preimage file exists. 
+ Future hasRewardsPreimageFile() async { + try { + final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); + final preimageFile = File('$quantusHome/$_rewardsPreimageFileName'); + return await preimageFile.exists(); + } catch (e) { + _log.e('Error checking rewards preimage file', error: e); + return false; + } + } + + /// Read the rewards preimage from the file. + Future readRewardsPreimageFile() async { + try { + final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); + final preimageFile = File('$quantusHome/$_rewardsPreimageFileName'); + if (await preimageFile.exists()) { + return (await preimageFile.readAsString()).trim(); + } + return null; + } catch (e) { + _log.e('Error reading rewards preimage file', error: e); + return null; + } + } + + /// Save the rewards preimage to file. + Future _saveRewardsPreimage(String preimage) async { + try { + final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); + final preimageFile = File('$quantusHome/$_rewardsPreimageFileName'); + await preimageFile.writeAsString(preimage); + _log.i('Rewards preimage saved to: ${preimageFile.path}'); + + // Also delete legacy rewards-address.txt if it exists + final legacyFile = File('$quantusHome/$_legacyRewardsAddressFileName'); + if (await legacyFile.exists()) { + await legacyFile.delete(); + _log.i('Deleted legacy rewards address file'); + } + } catch (e) { + _log.e('Error saving rewards preimage', error: e); + rethrow; + } + } + + /// Validate a rewards preimage (SS58 format check). + /// + /// The preimage should be a valid SS58 address (the first_hash encoded). 
+ bool validatePreimage(String preimage) { + final trimmed = preimage.trim(); + // Basic SS58 validation: starts with valid prefix and has reasonable length + // Quantus SS58 addresses typically start with 'q' and are 47-48 characters + if (trimmed.isEmpty) return false; + if (trimmed.length < 40 || trimmed.length > 50) return false; + // Check for valid base58 characters (no 0, O, I, l) + final base58Regex = RegExp(r'^[1-9A-HJ-NP-Za-km-z]+$'); + return base58Regex.hasMatch(trimmed); + } + + /// Save just the rewards preimage directly (without mnemonic). + /// + /// Use this when the user has a preimage from another source (e.g., CLI) + /// and doesn't want to import their full mnemonic. + /// + /// Note: Without the mnemonic, the user cannot withdraw rewards from this app. + /// They will need to use the CLI or another tool with access to the secret. + Future savePreimageOnly(String preimage) async { + final trimmed = preimage.trim(); + + if (!validatePreimage(trimmed)) { + throw ArgumentError( + 'Invalid preimage format. Expected SS58-encoded address.', + ); + } + + // Save the preimage to file + await _saveRewardsPreimage(trimmed); + _log.i('Preimage saved (without mnemonic)'); + } + + /// Check if we have the full mnemonic (can withdraw) or just preimage (mining only). + Future canWithdraw() async { + return await hasMnemonic(); + } + + /// Delete all wallet data (for logout/reset). 
+ Future deleteWalletData() async { + _log.i('Deleting wallet data...'); + + // Delete mnemonic from secure storage + try { + await _secureStorage.delete(key: _mnemonicKey); + _log.i('Mnemonic deleted from secure storage'); + } catch (e) { + _log.e('Error deleting mnemonic', error: e); + } + + // Delete rewards preimage file + try { + final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); + final preimageFile = File('$quantusHome/$_rewardsPreimageFileName'); + if (await preimageFile.exists()) { + await preimageFile.delete(); + _log.i('Rewards preimage file deleted'); + } + } catch (e) { + _log.e('Error deleting rewards preimage file', error: e); + } + + // Delete legacy rewards address file + try { + final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); + final legacyFile = File('$quantusHome/$_legacyRewardsAddressFileName'); + if (await legacyFile.exists()) { + await legacyFile.delete(); + _log.i('Legacy rewards address file deleted'); + } + } catch (e) { + _log.e('Error deleting legacy rewards address file', error: e); + } + } + + /// Check if the setup is complete (either new preimage file or legacy address file exists). 
+ Future isSetupComplete() async { + // Check for new preimage file first + if (await hasRewardsPreimageFile()) { + return true; + } + + // Fall back to checking legacy file for backward compatibility + try { + final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); + final legacyFile = File('$quantusHome/$_legacyRewardsAddressFileName'); + return await legacyFile.exists(); + } catch (e) { + return false; + } + } +} diff --git a/miner-app/lib/src/services/mining_orchestrator.dart b/miner-app/lib/src/services/mining_orchestrator.dart index 8e0a1240..67660043 100644 --- a/miner-app/lib/src/services/mining_orchestrator.dart +++ b/miner-app/lib/src/services/mining_orchestrator.dart @@ -7,10 +7,12 @@ import 'package:quantus_miner/src/services/chain_rpc_client.dart'; import 'package:quantus_miner/src/services/external_miner_api_client.dart'; import 'package:quantus_miner/src/services/log_stream_processor.dart'; import 'package:quantus_miner/src/services/miner_process_manager.dart'; +import 'package:quantus_miner/src/services/miner_settings_service.dart'; import 'package:quantus_miner/src/services/mining_stats_service.dart'; import 'package:quantus_miner/src/services/node_process_manager.dart'; import 'package:quantus_miner/src/services/process_cleanup_service.dart'; import 'package:quantus_miner/src/services/prometheus_service.dart'; +import 'package:quantus_miner/src/services/transfer_tracking_service.dart'; import 'package:quantus_miner/src/utils/app_logger.dart'; final _log = log.withTag('Orchestrator'); @@ -56,8 +58,13 @@ class MiningSessionConfig { /// Path to the node identity key file. final File identityFile; - /// Path to the rewards address file. - final File rewardsFile; + /// The rewards preimage (SS58 format) to pass to the node. + /// This is the first_hash derived from the wormhole secret. + final String rewardsPreimage; + + /// The wormhole address (SS58) where mining rewards are sent. + /// Used for transfer tracking. + final String? 
wormholeAddress; /// Chain ID to connect to. final String chainId; @@ -78,7 +85,8 @@ class MiningSessionConfig { required this.nodeBinary, required this.minerBinary, required this.identityFile, - required this.rewardsFile, + required this.rewardsPreimage, + this.wormholeAddress, this.chainId = 'dev', this.cpuWorkers = 8, this.gpuDevices = 0, @@ -119,6 +127,11 @@ class MiningOrchestrator { double _lastValidHashrate = 0.0; int _consecutiveMetricsFailures = 0; + // Transfer tracking for withdrawal proofs + final TransferTrackingService _transferTrackingService = + TransferTrackingService(); + int _lastTrackedBlock = 0; + // Stream controllers final _logsController = StreamController.broadcast(); final _statsController = StreamController.broadcast(); @@ -164,7 +177,8 @@ class MiningOrchestrator { _state == MiningState.stoppingMiner; /// Whether the orchestrator is in any running state. - bool get isRunning => _state != MiningState.idle && _state != MiningState.error; + bool get isRunning => + _state != MiningState.idle && _state != MiningState.error; /// Node process PID, if running. int? 
get nodeProcessPid => _nodeManager.pid; @@ -227,15 +241,12 @@ class MiningOrchestrator { _actualMetricsPort = ports['metrics']!; _updateMetricsClient(); - // Read rewards address - final rewardsAddress = await _readRewardsAddress(config.rewardsFile); - - // Start node + // Start node with rewards preimage directly from config await _nodeManager.start( NodeConfig( binary: config.nodeBinary, identityFile: config.identityFile, - rewardsAddress: rewardsAddress, + rewardsPreimage: config.rewardsPreimage, chainId: config.chainId, minerListenPort: config.minerListenPort, ), @@ -250,7 +261,30 @@ class MiningOrchestrator { // Start Prometheus polling for target block _prometheusTimer?.cancel(); - _prometheusTimer = Timer.periodic(MinerConfig.prometheusPollingInterval, (_) => _fetchPrometheusMetrics()); + _prometheusTimer = Timer.periodic( + MinerConfig.prometheusPollingInterval, + (_) => _fetchPrometheusMetrics(), + ); + + // Initialize transfer tracking for withdrawal proof generation + if (config.wormholeAddress != null) { + _transferTrackingService.initialize( + rpcUrl: MinerConfig.nodeRpcUrl(MinerConfig.defaultNodeRpcPort), + wormholeAddresses: {config.wormholeAddress!}, + ); + + // For local dev chains, clear old transfers since the chain resets + final settingsService = MinerSettingsService(); + final chainConfig = await settingsService.getChainConfig(); + final isDevChain = chainConfig.isLocalNode; + + await _transferTrackingService.loadFromDisk( + clearForDevChain: isDevChain, + ); + _log.i( + 'Transfer tracking initialized for ${config.wormholeAddress} (devChain=$isDevChain)', + ); + } _setState(MiningState.nodeRunning); _log.i('Node started successfully'); @@ -274,7 +308,7 @@ class MiningOrchestrator { nodeBinary: _currentConfig!.nodeBinary, minerBinary: _currentConfig!.minerBinary, identityFile: _currentConfig!.identityFile, - rewardsFile: _currentConfig!.rewardsFile, + rewardsPreimage: _currentConfig!.rewardsPreimage, chainId: _currentConfig!.chainId, 
cpuWorkers: cpuWorkers ?? _currentConfig!.cpuWorkers, gpuDevices: gpuDevices ?? _currentConfig!.gpuDevices, @@ -373,7 +407,9 @@ class MiningOrchestrator { /// Stop only the node (and miner if running). Future stopNode() async { - if (!isNodeRunning && _state != MiningState.startingNode && _state != MiningState.waitingForRpc) { + if (!isNodeRunning && + _state != MiningState.startingNode && + _state != MiningState.waitingForRpc) { _log.w('Cannot stop node: not running (state: $_state)'); return; } @@ -428,7 +464,9 @@ class MiningOrchestrator { void _initializeApiClients() { _minerApiClient = ExternalMinerApiClient( - metricsUrl: MinerConfig.minerMetricsUrl(MinerConfig.defaultMinerMetricsPort), + metricsUrl: MinerConfig.minerMetricsUrl( + MinerConfig.defaultMinerMetricsPort, + ), ); _minerApiClient.onMetricsUpdate = _handleMinerMetrics; _minerApiClient.onError = _handleMinerMetricsError; @@ -458,7 +496,8 @@ class MiningOrchestrator { // Forward node errors _nodeErrorSubscription = _nodeManager.errors.listen((error) { _errorController.add(error); - if (error.type == MinerErrorType.nodeCrashed && _state == MiningState.mining) { + if (error.type == MinerErrorType.nodeCrashed && + _state == MiningState.mining) { _log.w('Node crashed while mining, stopping...'); _handleCrash(); } @@ -467,7 +506,8 @@ class MiningOrchestrator { // Forward miner errors _minerErrorSubscription = _minerManager.errors.listen((error) { _errorController.add(error); - if (error.type == MinerErrorType.minerCrashed && _state == MiningState.mining) { + if (error.type == MinerErrorType.minerCrashed && + _state == MiningState.mining) { _log.w('Miner crashed while mining'); // Don't stop everything - just emit the error for UI to show } @@ -476,20 +516,14 @@ class MiningOrchestrator { void _updateMetricsClient() { if (_actualMetricsPort != MinerConfig.defaultMinerMetricsPort) { - _minerApiClient = ExternalMinerApiClient(metricsUrl: MinerConfig.minerMetricsUrl(_actualMetricsPort)); + _minerApiClient = 
ExternalMinerApiClient( + metricsUrl: MinerConfig.minerMetricsUrl(_actualMetricsPort), + ); _minerApiClient.onMetricsUpdate = _handleMinerMetrics; _minerApiClient.onError = _handleMinerMetricsError; } } - Future _readRewardsAddress(File rewardsFile) async { - if (!await rewardsFile.exists()) { - throw Exception('Rewards address file not found: ${rewardsFile.path}'); - } - final address = await rewardsFile.readAsString(); - return address.trim(); - } - Future _waitForNodeRpc() async { _log.d('Waiting for node RPC...'); int attempts = 0; @@ -589,7 +623,8 @@ class MiningOrchestrator { _emitStats(); } else { _consecutiveMetricsFailures++; - if (_consecutiveMetricsFailures >= MinerConfig.maxConsecutiveMetricsFailures) { + if (_consecutiveMetricsFailures >= + MinerConfig.maxConsecutiveMetricsFailures) { _statsService.updateHashrate(0); _lastValidHashrate = 0; _emitStats(); @@ -602,7 +637,8 @@ class MiningOrchestrator { void _handleMinerMetricsError(String error) { _consecutiveMetricsFailures++; - if (_consecutiveMetricsFailures >= MinerConfig.maxConsecutiveMetricsFailures) { + if (_consecutiveMetricsFailures >= + MinerConfig.maxConsecutiveMetricsFailures) { if (_statsService.currentStats.hashrate != 0) { _statsService.updateHashrate(0); _lastValidHashrate = 0; @@ -616,8 +652,44 @@ class MiningOrchestrator { _statsService.updatePeerCount(info.peerCount); } _statsService.updateChainName(info.chainName); - _statsService.setSyncingState(info.isSyncing, info.currentBlock, info.targetBlock ?? info.currentBlock); + _statsService.setSyncingState( + info.isSyncing, + info.currentBlock, + info.targetBlock ?? 
info.currentBlock, + ); _emitStats(); + + // Track transfers when new blocks are detected (for withdrawal proofs) + // Initialize _lastTrackedBlock on first chain info to avoid processing old blocks + if (_lastTrackedBlock == 0 && info.currentBlock > 0) { + _lastTrackedBlock = info.currentBlock; + _log.i('Initialized transfer tracking at block $_lastTrackedBlock'); + } else if (info.currentBlock > _lastTrackedBlock && + _state == MiningState.mining) { + _trackNewBlockTransfers(info.currentBlock); + } + } + + /// Track transfers in newly detected blocks for withdrawal proof generation. + void _trackNewBlockTransfers(int currentBlock) { + // Process all blocks since last tracked (in case we missed some) + for (int block = _lastTrackedBlock + 1; block <= currentBlock; block++) { + _getBlockHashAndTrack(block); + } + _lastTrackedBlock = currentBlock; + } + + /// Get block hash and process for transfer tracking. + Future _getBlockHashAndTrack(int blockNumber) async { + try { + // Get block hash from block number + final blockHash = await _chainRpcClient.getBlockHash(blockNumber); + if (blockHash != null) { + await _transferTrackingService.processBlock(blockNumber, blockHash); + } + } catch (e) { + _log.w('Failed to track transfers for block $blockNumber: $e'); + } } void _handleChainRpcError(String error) { diff --git a/miner-app/lib/src/services/node_process_manager.dart b/miner-app/lib/src/services/node_process_manager.dart index 0a04f485..3148f799 100644 --- a/miner-app/lib/src/services/node_process_manager.dart +++ b/miner-app/lib/src/services/node_process_manager.dart @@ -19,8 +19,9 @@ class NodeConfig { /// Path to the node identity key file. final File identityFile; - /// The rewards address for mining. - final String rewardsAddress; + /// The rewards preimage (first hash) for mining rewards. + /// This is passed to the node via --rewards-preimage flag. + final String rewardsPreimage; /// Chain ID to connect to ('dev' or 'dirac'). 
final String chainId; @@ -40,7 +41,7 @@ class NodeConfig { NodeConfig({ required this.binary, required this.identityFile, - required this.rewardsAddress, + required this.rewardsPreimage, this.chainId = 'dev', this.minerListenPort = 9833, this.rpcPort = 9933, @@ -93,14 +94,18 @@ class NodeProcessManager extends BaseProcessManager { // Validate binary exists if (!await config.binary.exists()) { - final error = MinerError.nodeStartupFailed('Node binary not found: ${config.binary.path}'); + final error = MinerError.nodeStartupFailed( + 'Node binary not found: ${config.binary.path}', + ); errorController.add(error); throw Exception(error.message); } // Validate identity file exists if (!await config.identityFile.exists()) { - final error = MinerError.nodeStartupFailed('Identity file not found: ${config.identityFile.path}'); + final error = MinerError.nodeStartupFailed( + 'Identity file not found: ${config.identityFile.path}', + ); errorController.add(error); throw Exception(error.message); } @@ -137,7 +142,7 @@ class NodeProcessManager extends BaseProcessManager { // Only use --base-path for non-dev chains (dev uses temp storage for fresh state) if (config.chainId != 'dev') ...['--base-path', basePath], '--node-key-file', config.identityFile.path, - '--rewards-address', config.rewardsAddress, + '--rewards-preimage', config.rewardsPreimage, '--validator', // Chain selection if (config.chainId == 'dev') '--dev' else ...['--chain', config.chainId], diff --git a/miner-app/lib/src/services/process_cleanup_service.dart b/miner-app/lib/src/services/process_cleanup_service.dart index a144e687..18a8b1a1 100644 --- a/miner-app/lib/src/services/process_cleanup_service.dart +++ b/miner-app/lib/src/services/process_cleanup_service.dart @@ -59,13 +59,22 @@ class ProcessCleanupService { } } - static Future _forceKillWindowsProcess(int pid, String processName) async { - final killResult = await Process.run('taskkill', ['/F', '/PID', pid.toString()]); + static Future 
_forceKillWindowsProcess( + int pid, + String processName, + ) async { + final killResult = await Process.run('taskkill', [ + '/F', + '/PID', + pid.toString(), + ]); if (killResult.exitCode == 0) { _log.d('Killed $processName (PID: $pid)'); } else { - _log.w('taskkill failed for $processName (PID: $pid), exit: ${killResult.exitCode}'); + _log.w( + 'taskkill failed for $processName (PID: $pid), exit: ${killResult.exitCode}', + ); } await Future.delayed(MinerConfig.processVerificationDelay); @@ -94,7 +103,9 @@ class ProcessCleanupService { if (killResult.exitCode == 0) { _log.d('Killed $processName (PID: $pid)'); } else { - _log.w('kill failed for $processName (PID: $pid), exit: ${killResult.exitCode}'); + _log.w( + 'kill failed for $processName (PID: $pid), exit: ${killResult.exitCode}', + ); } await Future.delayed(MinerConfig.processVerificationDelay); @@ -105,7 +116,9 @@ class ProcessCleanupService { _log.w('$processName (PID: $pid) may still be running'); // Try pkill as last resort - final binaryName = processName.contains('miner') ? MinerConfig.minerBinaryName : MinerConfig.nodeBinaryName; + final binaryName = processName.contains('miner') + ? MinerConfig.minerBinaryName + : MinerConfig.nodeBinaryName; await Process.run('pkill', ['-9', '-f', binaryName]); return false; } @@ -136,7 +149,8 @@ class ProcessCleanupService { try { if (Platform.isWindows) { final result = await Process.run('netstat', ['-ano']); - return result.exitCode == 0 && result.stdout.toString().contains(':$port'); + return result.exitCode == 0 && + result.stdout.toString().contains(':$port'); } else { final result = await Process.run('lsof', ['-i', ':$port']); return result.exitCode == 0 && result.stdout.toString().isNotEmpty; @@ -202,7 +216,11 @@ class ProcessCleanupService { /// Tries ports in range [startPort, startPort + MinerConfig.portSearchRange]. /// Returns the original port if no alternative is found. 
static Future findAvailablePort(int startPort) async { - for (int port = startPort; port <= startPort + MinerConfig.portSearchRange; port++) { + for ( + int port = startPort; + port <= startPort + MinerConfig.portSearchRange; + port++ + ) { if (!(await isPortInUse(port))) { return port; } @@ -214,7 +232,10 @@ class ProcessCleanupService { /// /// Returns a map of port names to their actual values (may differ from defaults /// if an alternative port was needed). - static Future> ensurePortsAvailable({required int quicPort, required int metricsPort}) async { + static Future> ensurePortsAvailable({ + required int quicPort, + required int metricsPort, + }) async { final result = {'quic': quicPort, 'metrics': metricsPort}; // Check QUIC port @@ -251,7 +272,11 @@ class ProcessCleanupService { static Future cleanupExistingNodeProcesses() async { try { if (Platform.isWindows) { - await Process.run('taskkill', ['/F', '/IM', MinerConfig.nodeBinaryNameWindows]); + await Process.run('taskkill', [ + '/F', + '/IM', + MinerConfig.nodeBinaryNameWindows, + ]); await Future.delayed(MinerConfig.processCleanupDelay); } else { await _cleanupUnixProcesses(MinerConfig.nodeBinaryName); @@ -265,7 +290,11 @@ class ProcessCleanupService { static Future cleanupExistingMinerProcesses() async { try { if (Platform.isWindows) { - await Process.run('taskkill', ['/F', '/IM', MinerConfig.minerBinaryNameWindows]); + await Process.run('taskkill', [ + '/F', + '/IM', + MinerConfig.minerBinaryNameWindows, + ]); await Future.delayed(MinerConfig.processCleanupDelay); } else { await _cleanupUnixProcesses(MinerConfig.minerBinaryName); @@ -310,7 +339,8 @@ class ProcessCleanupService { static Future cleanupDatabaseLocks(String chainId) async { try { final quantusHome = await BinaryManager.getQuantusHomeDirectoryPath(); - final lockFilePath = '$quantusHome/node_data/chains/$chainId/db/full/LOCK'; + final lockFilePath = + '$quantusHome/node_data/chains/$chainId/db/full/LOCK'; final lockFile = File(lockFilePath); 
if (await lockFile.exists()) { @@ -395,8 +425,16 @@ class ProcessCleanupService { _log.d(' Killing all quantus processes...'); if (Platform.isWindows) { - await Process.run('taskkill', ['/F', '/IM', MinerConfig.nodeBinaryNameWindows]); - await Process.run('taskkill', ['/F', '/IM', MinerConfig.minerBinaryNameWindows]); + await Process.run('taskkill', [ + '/F', + '/IM', + MinerConfig.nodeBinaryNameWindows, + ]); + await Process.run('taskkill', [ + '/F', + '/IM', + MinerConfig.minerBinaryNameWindows, + ]); } else { await Process.run('pkill', ['-9', '-f', MinerConfig.nodeBinaryName]); await Process.run('pkill', ['-9', '-f', MinerConfig.minerBinaryName]); diff --git a/miner-app/lib/src/services/prometheus_service.dart b/miner-app/lib/src/services/prometheus_service.dart index 68d45a0d..46540403 100644 --- a/miner-app/lib/src/services/prometheus_service.dart +++ b/miner-app/lib/src/services/prometheus_service.dart @@ -9,7 +9,12 @@ class PrometheusMetrics { final int? targetBlock; final int? peerCount; - PrometheusMetrics({required this.isMajorSyncing, this.bestBlock, this.targetBlock, this.peerCount}); + PrometheusMetrics({ + required this.isMajorSyncing, + this.bestBlock, + this.targetBlock, + this.peerCount, + }); @override String toString() { @@ -21,11 +26,15 @@ class PrometheusService { final String metricsUrl; PrometheusService({String? metricsUrl}) - : metricsUrl = metricsUrl ?? MinerConfig.nodePrometheusUrl(MinerConfig.defaultNodePrometheusPort); + : metricsUrl = + metricsUrl ?? 
+ MinerConfig.nodePrometheusUrl(MinerConfig.defaultNodePrometheusPort); Future fetchMetrics() async { try { - final response = await http.get(Uri.parse(metricsUrl)).timeout(const Duration(seconds: 3)); + final response = await http + .get(Uri.parse(metricsUrl)) + .timeout(const Duration(seconds: 3)); if (response.statusCode == 200) { final lines = response.body.split('\n'); @@ -46,13 +55,17 @@ class PrometheusService { if (parts.length == 2) { bestBlock = int.tryParse(parts[1]); } - } else if (line.startsWith('substrate_block_height{status="sync_target"')) { + } else if (line.startsWith( + 'substrate_block_height{status="sync_target"', + )) { final parts = line.split(' '); if (parts.length == 2) { targetBlock = int.tryParse(parts[1]); } } else if (line.startsWith('substrate_sub_libp2p_peers_count ') || - line.startsWith('substrate_sub_libp2p_kademlia_query_duration_count ') || + line.startsWith( + 'substrate_sub_libp2p_kademlia_query_duration_count ', + ) || line.contains('substrate_sub_libp2p_connections_opened_total') || line.contains('substrate_peerset_num_discovered_peers')) { // Try various peer-related metrics @@ -71,7 +84,9 @@ class PrometheusService { if (bestBlock != null && targetBlock != null && (targetBlock - bestBlock) > 5 && - !lines.any((l) => l.startsWith('substrate_sub_libp2p_is_major_syncing'))) { + !lines.any( + (l) => l.startsWith('substrate_sub_libp2p_is_major_syncing'), + )) { // If the specific major sync metric isn't there, but there's a clear block difference, // infer syncing state. 
isSyncing = true; diff --git a/miner-app/lib/src/services/transfer_tracking_service.dart b/miner-app/lib/src/services/transfer_tracking_service.dart new file mode 100644 index 00000000..453b4a5c --- /dev/null +++ b/miner-app/lib/src/services/transfer_tracking_service.dart @@ -0,0 +1,509 @@ +import 'dart:convert'; +import 'dart:io'; +import 'dart:typed_data'; + +import 'package:http/http.dart' as http; +import 'package:path_provider/path_provider.dart'; +import 'package:polkadart/polkadart.dart' show Hasher; +import 'package:polkadart/scale_codec.dart' as scale; +import 'package:quantus_miner/src/utils/app_logger.dart'; +import 'package:quantus_sdk/quantus_sdk.dart'; +import 'package:quantus_sdk/generated/planck/types/frame_system/event_record.dart'; +import 'package:quantus_sdk/generated/planck/types/pallet_wormhole/pallet/event.dart' + as wormhole_event; +import 'package:quantus_sdk/generated/planck/types/quantus_runtime/runtime_event.dart' + as runtime_event; +import 'package:ss58/ss58.dart' as ss58; + +final _log = log.withTag('TransferTracking'); + +/// Information about a mining reward transfer. +/// +/// This is tracked locally when mining blocks so we can generate +/// withdrawal proofs later. 
+class TrackedTransfer { + final String blockHash; + final int blockNumber; + final BigInt transferCount; + final BigInt amount; + final String wormholeAddress; + final String fundingAccount; + final DateTime timestamp; + + const TrackedTransfer({ + required this.blockHash, + required this.blockNumber, + required this.transferCount, + required this.amount, + required this.wormholeAddress, + required this.fundingAccount, + required this.timestamp, + }); + + Map toJson() => { + 'blockHash': blockHash, + 'blockNumber': blockNumber, + 'transferCount': transferCount.toString(), + 'amount': amount.toString(), + 'wormholeAddress': wormholeAddress, + 'fundingAccount': fundingAccount, + 'timestamp': timestamp.toIso8601String(), + }; + + factory TrackedTransfer.fromJson(Map json) { + return TrackedTransfer( + blockHash: json['blockHash'] as String, + blockNumber: json['blockNumber'] as int, + transferCount: BigInt.parse(json['transferCount'] as String), + amount: BigInt.parse(json['amount'] as String), + wormholeAddress: json['wormholeAddress'] as String, + fundingAccount: json['fundingAccount'] as String, + timestamp: DateTime.parse(json['timestamp'] as String), + ); + } + + @override + String toString() => + 'TrackedTransfer(block: $blockNumber, count: $transferCount, amount: $amount)'; +} + +/// Service for tracking mining reward transfers. +/// +/// This service monitors mined blocks for NativeTransferred events +/// and stores them locally for later use in withdrawal proof generation. +/// +/// NOTE: This only tracks transfers that occur while the app is running. +/// Transfers made while the app is closed (e.g., direct transfers to the +/// wormhole address from another wallet) will NOT be tracked. Those would +/// require either: +/// - Scanning historical blocks on startup +/// - Using an indexer like Subsquid +/// - Manual entry of transfer details +class TransferTrackingService { + static const String _storageFileName = 'mining_transfers.json'; + + String? 
_rpcUrl; + /// Set of wormhole addresses to track transfers for. + final Set _trackedAddresses = {}; + int _lastProcessedBlock = 0; + + // In-memory cache of tracked transfers + final Map> _transfersByAddress = {}; + + /// Initialize the service with RPC URL and wormhole addresses to track. + void initialize({required String rpcUrl, required Set wormholeAddresses}) { + _rpcUrl = rpcUrl; + _trackedAddresses.clear(); + _trackedAddresses.addAll(wormholeAddresses); + _log.i('Initialized transfer tracking for ${wormholeAddresses.length} addresses'); + } + + /// Add a new address to track. + void addTrackedAddress(String address) { + _trackedAddresses.add(address); + _log.i('Added address to tracking: $address'); + } + + /// Get all tracked addresses. + Set get trackedAddresses => Set.unmodifiable(_trackedAddresses); + + /// Load previously tracked transfers from disk. + /// + /// If [clearForDevChain] is true, will clear any existing transfers instead + /// of loading them. Use this for dev chains that reset on each restart. + Future loadFromDisk({bool clearForDevChain = false}) async { + if (clearForDevChain) { + _log.i('Dev chain mode: clearing tracked transfers'); + await clearAllTransfers(); + return; + } + + try { + final file = await _getStorageFile(); + if (await file.exists()) { + final content = await file.readAsString(); + final data = jsonDecode(content) as Map; + + _transfersByAddress.clear(); + final transfersData = data['transfers'] as Map?; + if (transfersData != null) { + for (final entry in transfersData.entries) { + final address = entry.key; + final transfers = (entry.value as List) + .map((t) => TrackedTransfer.fromJson(t as Map)) + .toList(); + _transfersByAddress[address] = transfers; + } + } + + _lastProcessedBlock = data['lastProcessedBlock'] as int? ?? 
0; + _log.i( + 'Loaded ${_transfersByAddress.values.expand((t) => t).length} transfers from disk', + ); + } + } catch (e) { + _log.e('Failed to load transfers from disk', error: e); + } + } + + /// Clear all tracked transfers and delete the storage file. + Future clearAllTransfers() async { + _transfersByAddress.clear(); + _lastProcessedBlock = 0; + try { + final file = await _getStorageFile(); + if (await file.exists()) { + await file.delete(); + _log.i('Deleted tracked transfers file'); + } + } catch (e) { + _log.e('Failed to delete transfers file', error: e); + } + } + + /// Save tracked transfers to disk. + Future saveToDisk() async { + try { + final file = await _getStorageFile(); + final data = { + 'lastProcessedBlock': _lastProcessedBlock, + 'transfers': _transfersByAddress.map( + (address, transfers) => + MapEntry(address, transfers.map((t) => t.toJson()).toList()), + ), + }; + await file.writeAsString(jsonEncode(data)); + _log.d('Saved transfers to disk'); + } catch (e) { + _log.e('Failed to save transfers to disk', error: e); + } + } + + Future _getStorageFile() async { + final appDir = await getApplicationSupportDirectory(); + final quantusDir = Directory('${appDir.path}/.quantus'); + if (!await quantusDir.exists()) { + await quantusDir.create(recursive: true); + } + return File('${quantusDir.path}/$_storageFileName'); + } + + /// Process a newly mined block to check for transfers. + /// + /// Call this when a new block is detected/mined. 
+ Future processBlock(int blockNumber, String blockHash) async { + _log.i('processBlock called: block=$blockNumber, hash=$blockHash'); + + if (_rpcUrl == null || _trackedAddresses.isEmpty) { + _log.w( + 'Service not initialized, skipping block $blockNumber (rpcUrl=$_rpcUrl, trackedAddresses=${_trackedAddresses.length})', + ); + return; + } + + // Skip if we've already processed this block + if (blockNumber <= _lastProcessedBlock) { + _log.d( + 'Skipping block $blockNumber (already processed up to $_lastProcessedBlock)', + ); + return; + } + + _log.i('Processing block $blockNumber for transfers to ${_trackedAddresses.length} tracked addresses'); + + try { + final transfers = await _getTransfersFromBlock(blockHash); + _log.i( + 'Block $blockNumber has ${transfers.length} total wormhole transfers', + ); + + // Filter for transfers to any of our tracked wormhole addresses + final relevantTransfers = transfers + .where((t) => _trackedAddresses.contains(t.wormholeAddress)) + .toList(); + + _log.i( + 'Block $blockNumber: ${relevantTransfers.length} transfers match tracked addresses', + ); + + if (relevantTransfers.isNotEmpty) { + _log.i( + 'Found ${relevantTransfers.length} transfer(s) to tracked addresses in block $blockNumber', + ); + + // Add to in-memory cache, grouped by address + for (final transfer in relevantTransfers) { + _transfersByAddress + .putIfAbsent(transfer.wormholeAddress, () => []) + .add(transfer); + } + + // Persist to disk + await saveToDisk(); + _log.i('Saved ${relevantTransfers.length} transfers to disk'); + } + + _lastProcessedBlock = blockNumber; + } catch (e, st) { + _log.e('Failed to process block $blockNumber', error: e, stackTrace: st); + } + } + + /// Get all tracked transfers for a wormhole address. + List getTransfers(String wormholeAddress) { + return _transfersByAddress[wormholeAddress] ?? []; + } + + /// Get all tracked transfers across all addresses. 
+ List getAllTransfers() { + return _transfersByAddress.values.expand((t) => t).toList(); + } + + /// Get total tracked balance across all addresses. + BigInt getTotalTrackedBalance() { + return getAllTransfers().fold(BigInt.zero, (sum, t) => sum + t.amount); + } + + /// Get unspent transfers for a wormhole address. + /// + /// Filters out transfers whose nullifiers have been consumed. + Future> getUnspentTransfers({ + required String wormholeAddress, + required String secretHex, + }) async { + final transfers = getTransfers(wormholeAddress); + if (transfers.isEmpty) return []; + + final wormholeService = WormholeService(); + final unspent = []; + + for (final transfer in transfers) { + final nullifier = wormholeService.computeNullifier( + secretHex: secretHex, + transferCount: transfer.transferCount, + ); + + final isConsumed = await _isNullifierConsumed(nullifier); + if (!isConsumed) { + unspent.add(transfer); + } + } + + return unspent; + } + + /// Check if a nullifier has been consumed on chain. + Future _isNullifierConsumed(String nullifierHex) async { + if (_rpcUrl == null) return false; + + try { + // Query Wormhole::UsedNullifiers storage + // Storage key: twox128("Wormhole") ++ twox128("UsedNullifiers") ++ blake2_128_concat(nullifier) + final nullifierBytes = nullifierHex.startsWith('0x') + ? 
nullifierHex.substring(2) + : nullifierHex; + + final modulePrefix = _twox128('Wormhole'); + final storagePrefix = _twox128('UsedNullifiers'); + final keyHash = _blake2128Concat(nullifierBytes); + + final storageKey = '0x$modulePrefix$storagePrefix$keyHash'; + + _log.d('Checking nullifier: $nullifierBytes'); + _log.d('Storage key: $storageKey'); + + final response = await http.post( + Uri.parse(_rpcUrl!), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'state_getStorage', + 'params': [storageKey], + }), + ); + + final result = jsonDecode(response.body); + if (result['error'] != null) { + _log.e('RPC error checking nullifier: ${result['error']}'); + return false; + } + + // If storage exists and is not empty, nullifier is consumed + final value = result['result'] as String?; + final isConsumed = value != null && value != '0x' && value.isNotEmpty; + + if (isConsumed) { + _log.i('Nullifier is CONSUMED: $nullifierHex'); + } else { + _log.d('Nullifier is unspent: $nullifierHex'); + } + + return isConsumed; + } catch (e) { + _log.e('Failed to check nullifier', error: e); + return false; + } + } + + // ============================================================ + // Helper functions for storage key computation + // ============================================================ + + /// Compute twox128 hash of a string (for Substrate storage key prefixes). + String _twox128(String input) { + final bytes = Uint8List.fromList(utf8.encode(input)); + final hash = Hasher.twoxx128.hash(bytes); + return _bytesToHex(hash); + } + + /// Compute blake2b-128 hash concatenated with input (for Substrate storage keys). + /// Returns: blake2b_128(input) ++ input + String _blake2128Concat(String hexInput) { + final bytes = _hexToBytes(hexInput); + final hash = Hasher.blake2b128.hash(bytes); + return _bytesToHex(hash) + _bytesToHex(bytes); + } + + /// Get transfers from a block by querying events. 
+ Future> _getTransfersFromBlock(String blockHash) async { + if (_rpcUrl == null) { + _log.w('_getTransfersFromBlock: rpcUrl is null'); + return []; + } + + try { + // Query System::Events storage at the block + _log.d('Fetching events for block $blockHash from $_rpcUrl'); + final eventsHex = await _getBlockEvents(blockHash); + if (eventsHex == null || eventsHex.isEmpty) { + _log.d('No events found for block $blockHash'); + return []; + } + + _log.d('Got events data: ${eventsHex.length} chars'); + + // Decode events and extract NativeTransferred + return _decodeNativeTransferredEvents(eventsHex, blockHash); + } catch (e, st) { + _log.e('Failed to get transfers from block', error: e, stackTrace: st); + return []; + } + } + + /// Get raw events storage for a block. + Future _getBlockEvents(String blockHash) async { + // Storage key for System::Events + // twox128("System") ++ twox128("Events") + const storageKey = + '0x26aa394eea5630e07c48ae0c9558cef780d41e5e16056765bc8461851072c9d7'; + + final response = await http.post( + Uri.parse(_rpcUrl!), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'state_getStorage', + 'params': [storageKey, blockHash], + }), + ); + + final result = jsonDecode(response.body); + if (result['error'] != null) { + _log.e('RPC error: ${result['error']}'); + return null; + } + + return result['result'] as String?; + } + + /// Decode NativeTransferred events from raw events data using generated Polkadart types. + /// + /// The events are SCALE-encoded as Vec>. + /// We look for Wormhole::NativeTransferred events. 
+ List _decodeNativeTransferredEvents( + String eventsHex, + String blockHash, + ) { + final transfers = []; + + try { + final bytes = _hexToBytes(eventsHex); + final input = scale.ByteInput(bytes); + + // Decode Vec + final numEvents = scale.CompactCodec.codec.decode(input); + _log.d('Block has $numEvents events'); + + for (var i = 0; i < numEvents; i++) { + try { + // Use the generated EventRecord codec to decode each event + final eventRecord = EventRecord.decode(input); + + // Check if this is a Wormhole event + final event = eventRecord.event; + _log.d('Event $i: ${event.runtimeType}'); + + if (event is runtime_event.Wormhole) { + final wormholeEvent = event.value0; + _log.i('Found Wormhole event: ${wormholeEvent.runtimeType}'); + + // Check if it's a NativeTransferred event + if (wormholeEvent is wormhole_event.NativeTransferred) { + final toSs58 = _accountIdToSs58( + Uint8List.fromList(wormholeEvent.to), + ); + final fromSs58 = _accountIdToSs58( + Uint8List.fromList(wormholeEvent.from), + ); + + _log.i( + 'Found NativeTransferred: to=$toSs58, amount=${wormholeEvent.amount}, count=${wormholeEvent.transferCount}', + ); + + transfers.add( + TrackedTransfer( + blockHash: blockHash, + blockNumber: 0, // Will be filled in by caller + transferCount: wormholeEvent.transferCount, + amount: wormholeEvent.amount, + wormholeAddress: toSs58, + fundingAccount: fromSs58, + timestamp: DateTime.now(), + ), + ); + } + } + } catch (e) { + _log.w('Failed to decode event $i: $e'); + // Continue trying to decode remaining events + } + } + } catch (e) { + _log.e('Failed to decode events', error: e); + } + + return transfers; + } + + /// Convert AccountId32 bytes to SS58 address with Quantus prefix (189). 
+ String _accountIdToSs58(Uint8List accountId) { + // Use ss58 package to encode with Quantus network prefix (189) + const quantusPrefix = 189; + return ss58.Address(prefix: quantusPrefix, pubkey: accountId).encode(); + } + + Uint8List _hexToBytes(String hex) { + final str = hex.startsWith('0x') ? hex.substring(2) : hex; + final result = Uint8List(str.length ~/ 2); + for (var i = 0; i < result.length; i++) { + result[i] = int.parse(str.substring(i * 2, i * 2 + 2), radix: 16); + } + return result; + } + + String _bytesToHex(Uint8List bytes) { + return bytes.map((b) => b.toRadixString(16).padLeft(2, '0')).join(); + } +} diff --git a/miner-app/lib/src/services/withdrawal_service.dart b/miner-app/lib/src/services/withdrawal_service.dart new file mode 100644 index 00000000..45734bbf --- /dev/null +++ b/miner-app/lib/src/services/withdrawal_service.dart @@ -0,0 +1,1530 @@ +import 'dart:convert'; +import 'dart:typed_data'; + +import 'package:http/http.dart' as http; +import 'package:polkadart/polkadart.dart' show Hasher; +import 'package:polkadart/scale_codec.dart' as scale; +import 'package:quantus_miner/src/services/miner_settings_service.dart'; +import 'package:quantus_miner/src/services/transfer_tracking_service.dart'; +import 'package:quantus_miner/src/services/wormhole_address_manager.dart'; +import 'package:quantus_miner/src/utils/app_logger.dart'; +import 'package:quantus_sdk/quantus_sdk.dart' + hide WormholeAddressManager, TrackedWormholeAddress, WormholeAddressPurpose; +import 'package:quantus_sdk/generated/planck/planck.dart'; +import 'package:quantus_sdk/generated/planck/types/frame_system/event_record.dart'; +import 'package:quantus_sdk/generated/planck/types/pallet_wormhole/pallet/call.dart' + as wormhole_call; +import 'package:quantus_sdk/generated/planck/types/pallet_wormhole/pallet/event.dart' + as wormhole_event; +import 'package:quantus_sdk/generated/planck/types/quantus_runtime/runtime_event.dart' + as runtime_event; +import 
import 'package:quantus_sdk/generated/planck/types/sp_runtime/dispatch_error.dart'
    as dispatch_error;
import 'package:quantus_sdk/generated/planck/types/frame_system/pallet/event.dart'
    as system_event;
import 'package:ss58/ss58.dart' as ss58;

final _log = log.withTag('Withdrawal');

/// Progress callback for withdrawal operations.
typedef WithdrawalProgressCallback =
    void Function(double progress, String message);

/// Result of a withdrawal operation.
class WithdrawalResult {
  // True when every batch was submitted and the confirmation check passed.
  final bool success;

  // Comma-separated list when several batches were submitted.
  final String? txHash;

  // Human-readable failure reason; null on success.
  final String? error;

  // Amount (planck) credited to the destination, after fees.
  final BigInt? exitAmount;

  /// If change was generated, this is the address where it was sent.
  final String? changeAddress;

  /// The amount sent to the change address (in planck).
  final BigInt? changeAmount;

  const WithdrawalResult({
    required this.success,
    this.txHash,
    this.error,
    this.exitAmount,
    this.changeAddress,
    this.changeAmount,
  });
}

/// Information about a transfer needed for proof generation.
/// Mirrors the CLI's TransferInfo struct.
class TransferInfo {
  // Block in which the transfer landed (used to anchor the storage proof).
  final String blockHash;

  // 1-based per-address transfer counter on chain.
  final BigInt transferCount;

  // Transfer value in planck.
  final BigInt amount;

  // Receiving wormhole address (SS58).
  final String wormholeAddress;

  // Account that funded the transfer (SS58).
  final String fundingAccount;

  const TransferInfo({
    required this.blockHash,
    required this.transferCount,
    required this.amount,
    required this.wormholeAddress,
    required this.fundingAccount,
  });

  @override
  String toString() =>
      'TransferInfo(blockHash: $blockHash, transferCount: $transferCount, amount: $amount)';
}
/// Service for handling wormhole withdrawals.
///
/// This orchestrates the entire withdrawal flow:
/// 1. Query chain for transfer count and transfer proofs
/// 2. For each transfer: fetch storage proof and generate ZK proof
/// 3. Aggregate proofs
/// 4. Submit transaction to chain
class WithdrawalService {
  final _settingsService = MinerSettingsService();

  // Fee in basis points (10 = 0.1%)
  static const int feeBps = 10;

  // Minimum output after quantization (3 units = 0.03 QTN)
  static final BigInt minOutputPlanck =
      BigInt.from(3) * BigInt.from(10).pow(10);

  // Native asset ID (0 for native token)
  static const int nativeAssetId = 0;

  // Default batch size (number of proofs per aggregation)
  // This should match the circuit config, but 16 is the current standard.
  static const int defaultBatchSize = 16;

  /// Withdraw funds from a wormhole address.
  ///
  /// [secretHex] - The wormhole secret for proof generation
  /// [wormholeAddress] - The source wormhole address (SS58)
  /// [destinationAddress] - Where to send the withdrawn funds (SS58)
  /// [amount] - Amount to withdraw in planck (null = withdraw all)
  /// [circuitBinsDir] - Directory containing circuit binary files
  /// [trackedTransfers] - Optional pre-tracked transfers with exact amounts
  ///   (from TransferTrackingService)
  /// [addressManager] - Optional address manager for deriving change
  ///   addresses; required for partial withdrawals
  /// [onProgress] - Progress callback for UI updates
  ///
  /// Never throws: every failure path is reported through
  /// [WithdrawalResult.error].
  Future<WithdrawalResult> withdraw({
    required String secretHex,
    required String wormholeAddress,
    required String destinationAddress,
    BigInt? amount,
    required String circuitBinsDir,
    List<TrackedTransfer>? trackedTransfers,
    WormholeAddressManager? addressManager,
    WithdrawalProgressCallback? onProgress,
  }) async {
    try {
      final chainConfig = await _settingsService.getChainConfig();
      final rpcUrl = chainConfig.rpcUrl;

      onProgress?.call(0.05, 'Querying chain for transfers...');

      // 1. Get transfers - use tracked transfers if available (have exact
      // amounts), otherwise fall back to chain query (which currently
      // always throws; see _getTransfersFromChain)
      final List<TransferInfo> transfers;
      if (trackedTransfers != null && trackedTransfers.isNotEmpty) {
        _log.i(
          'Using ${trackedTransfers.length} pre-tracked transfers with exact amounts',
        );
        transfers = trackedTransfers
            .map(
              (t) => TransferInfo(
                blockHash: t.blockHash,
                transferCount: t.transferCount,
                amount: t.amount,
                wormholeAddress: t.wormholeAddress,
                fundingAccount: t.fundingAccount,
              ),
            )
            .toList();
      } else {
        _log.w(
          'No tracked transfers available, falling back to chain query (amounts may be estimated)',
        );
        transfers = await _getTransfersFromChain(
          rpcUrl: rpcUrl,
          wormholeAddress: wormholeAddress,
          secretHex: secretHex,
        );
      }

      if (transfers.isEmpty) {
        return const WithdrawalResult(
          success: false,
          error: 'No unspent transfers found for this wormhole address',
        );
      }

      // Calculate total available
      final totalAvailable = transfers.fold(
        BigInt.zero,
        (sum, t) => sum + t.amount,
      );
      _log.i(
        'Total available: $totalAvailable planck (${transfers.length} transfers)',
      );

      // Determine amount to withdraw (null means "everything")
      final withdrawAmount = amount ?? totalAvailable;
      if (withdrawAmount > totalAvailable) {
        return WithdrawalResult(
          success: false,
          error:
              'Insufficient balance. Available: $totalAvailable, requested: $withdrawAmount',
        );
      }

      onProgress?.call(0.1, 'Selecting transfers...');

      // 2. Select transfers (greedy largest-first; see _selectTransfers)
      final selectedTransfers = _selectTransfers(transfers, withdrawAmount);
      final selectedTotal = selectedTransfers.fold(
        BigInt.zero,
        (sum, t) => sum + t.amount,
      );

      _log.i(
        'Selected ${selectedTransfers.length} transfers totaling $selectedTotal planck',
      );

      // Calculate output amounts after fee (integer basis-point math)
      final totalAfterFee =
          selectedTotal -
          (selectedTotal * BigInt.from(feeBps) ~/ BigInt.from(10000));

      if (totalAfterFee < minOutputPlanck) {
        return const WithdrawalResult(
          success: false,
          error: 'Amount too small after fee (minimum ~0.03 QTN)',
        );
      }

      onProgress?.call(0.15, 'Loading circuit data...');

      // 3. Create proof generator (this loads ~171MB of circuit data)
      final wormholeService = WormholeService();
      final generator = await wormholeService.createProofGenerator(
        circuitBinsDir,
      );
      final aggregator = await wormholeService.createProofAggregator(
        circuitBinsDir,
      );

      onProgress?.call(0.18, 'Fetching current block...');

      // 4. Get the current best block hash - ALL proofs must use the same
      // block. This is required by the aggregation circuit which enforces
      // all proofs reference the same storage state snapshot.
      final proofBlockHash = await _fetchBestBlockHash(rpcUrl);
      _log.i('Using block $proofBlockHash for all proofs');

      // Calculate if we need change.
      // Change is needed when we're withdrawing less than the total
      // available after fees.
      final requestedAmountQuantized = wormholeService.quantizeAmount(
        withdrawAmount,
      );

      // Max possible outputs for each transfer (after fee deduction)
      final maxOutputsQuantized = selectedTransfers.map((t) {
        final inputQuantized = wormholeService.quantizeAmount(t.amount);
        return wormholeService.computeOutputAmount(inputQuantized, feeBps);
      }).toList();
      final totalMaxOutputQuantized = maxOutputsQuantized.fold(
        0,
        (a, b) => a + b,
      );

      final needsChange = requestedAmountQuantized < totalMaxOutputQuantized;
      String? changeAddress;
      TrackedWormholeAddress? changeAddressInfo;

      if (needsChange) {
        if (addressManager == null) {
          return const WithdrawalResult(
            success: false,
            error:
                'Partial withdrawal requires address manager for change address',
          );
        }

        onProgress?.call(0.19, 'Deriving change address...');
        changeAddressInfo = await addressManager.deriveNextChangeAddress();
        changeAddress = changeAddressInfo.address;
        _log.i('Change address: $changeAddress');
      }

      onProgress?.call(0.2, 'Generating proofs...');

      // 5. Generate proofs for each transfer.
      // If change is needed, the last transfer sends remaining to the
      // change address.
      final proofs = [];
      var remainingToSend = requestedAmountQuantized;

      for (int i = 0; i < selectedTransfers.length; i++) {
        final transfer = selectedTransfers[i];
        final maxOutput = maxOutputsQuantized[i];
        final isLastTransfer = i == selectedTransfers.length - 1;

        final progress = 0.2 + (0.5 * (i / selectedTransfers.length));
        onProgress?.call(
          progress,
          'Generating proof ${i + 1}/${selectedTransfers.length}...',
        );

        // Determine output and change amounts for this proof
        int outputAmount;
        int changeAmount = 0;

        if (isLastTransfer && needsChange) {
          // Last transfer: send remaining to destination, rest to change
          outputAmount = remainingToSend;
          changeAmount = maxOutput - outputAmount;
          if (changeAmount < 0) changeAmount = 0;
        } else if (needsChange) {
          // Not last transfer: send min of maxOutput or remaining
          outputAmount = remainingToSend < maxOutput
              ? remainingToSend
              : maxOutput;
        } else {
          // No change needed: send max output
          outputAmount = maxOutput;
        }

        remainingToSend -= outputAmount;

        try {
          final proof = await _generateProofForTransfer(
            generator: generator,
            wormholeService: wormholeService,
            transfer: transfer,
            secretHex: secretHex,
            destinationAddress: destinationAddress,
            rpcUrl: rpcUrl,
            proofBlockHash: proofBlockHash,
            outputAmount: needsChange ? outputAmount : null,
            changeAmount: changeAmount,
            changeAddress: changeAddress,
          );
          proofs.add(proof);
        } catch (e) {
          _log.e(
            'Failed to generate proof for transfer ${transfer.transferCount}',
            error: e,
          );
          return WithdrawalResult(
            success: false,
            error: 'Failed to generate proof: $e',
          );
        }
      }

      // 6. Get the batch size from the aggregator
      final batchSize = await aggregator.batchSize;
      _log.i('Circuit batch size: $batchSize proofs per aggregation');

      // 7. Split proofs into batches if needed
      final numBatches = (proofs.length + batchSize - 1) ~/ batchSize;
      _log.i('Splitting ${proofs.length} proofs into $numBatches batch(es)');

      final txHashes = <String>[];

      for (int batchIdx = 0; batchIdx < numBatches; batchIdx++) {
        final batchStart = batchIdx * batchSize;
        // NOTE: int.clamp returns num, which is not assignable to the int
        // parameter of sublist - use an explicit comparison instead.
        final int batchEnd = batchStart + batchSize < proofs.length
            ? batchStart + batchSize
            : proofs.length;
        final batchProofs = proofs.sublist(batchStart, batchEnd);

        final aggregateProgress = 0.7 + (0.1 * (batchIdx / numBatches));
        onProgress?.call(
          aggregateProgress,
          'Aggregating batch ${batchIdx + 1}/$numBatches (${batchProofs.length} proofs)...',
        );

        // Clear aggregator and add proofs for this batch
        await aggregator.clear();
        for (final proof in batchProofs) {
          await aggregator.addGeneratedProof(proof);
        }
        final aggregatedProof = await aggregator.aggregate();

        _log.i(
          'Batch ${batchIdx + 1}: Aggregated ${aggregatedProof.numRealProofs} proofs',
        );

        final submitProgress = 0.8 + (0.15 * (batchIdx / numBatches));
        onProgress?.call(
          submitProgress,
          'Submitting batch ${batchIdx + 1}/$numBatches...',
        );

        // Submit this batch
        final txHash = await _submitProof(proofHex: aggregatedProof.proofHex);
        txHashes.add(txHash);
        _log.i('Batch ${batchIdx + 1} submitted: $txHash');
      }

      onProgress?.call(0.95, 'Waiting for confirmations...');

      // 8. Wait for all transactions to be confirmed.
      // For simplicity, we wait for the last one (all should be in same or
      // adjacent blocks)
      final lastTxHash = txHashes.last;
      final confirmed = await _waitForTransactionConfirmation(
        txHash: lastTxHash,
        rpcUrl: rpcUrl,
        destinationAddress: destinationAddress,
        expectedAmount: totalAfterFee,
      );

      if (!confirmed) {
        return WithdrawalResult(
          success: false,
          txHash: txHashes.join(', '),
          error:
              'Transactions submitted but could not confirm success. '
              'Check txs: ${txHashes.join(', ')}',
        );
      }

      onProgress?.call(1.0, 'Withdrawal complete!');

      // Calculate change amount in planck if change was used
      BigInt? changeAmountPlanck;
      if (needsChange && changeAddress != null) {
        final changeQuantized =
            totalMaxOutputQuantized - requestedAmountQuantized;
        changeAmountPlanck = wormholeService.dequantizeAmount(changeQuantized);
      }

      return WithdrawalResult(
        success: true,
        txHash: txHashes.join(', '),
        exitAmount: totalAfterFee,
        changeAddress: changeAddress,
        changeAmount: changeAmountPlanck,
      );
    } catch (e) {
      _log.e('Withdrawal failed', error: e);
      return WithdrawalResult(success: false, error: e.toString());
    }
  }

  /// Get transfers to a wormhole address by querying chain storage.
  ///
  /// NOTE: This fallback is not implemented and always throws. Transfer
  /// amounts cannot be reliably recovered from storage alone, so
  /// withdrawals must use transfers recorded by TransferTrackingService
  /// while the app was mining. The previous scaffold that scanned
  /// nullifiers and estimated per-transfer amounts was unreachable (it sat
  /// after the unconditional throw) and has been removed; see version
  /// control history if it is ever needed as a reference.
  Future<List<TransferInfo>> _getTransfersFromChain({
    required String rpcUrl,
    required String wormholeAddress,
    required String secretHex,
  }) async {
    _log.e(
      'Chain query fallback is not implemented - transfers must be tracked while mining',
    );
    throw Exception(
      'No tracked transfers available. Mining rewards can only be withdrawn '
      'for blocks mined while the app was open. Please mine some blocks first.',
    );
  }
+ Future _getMintingAccount(String rpcUrl) async { + // Get the minting account from the generated Planck constants + // This is PalletId(*b"wormhole").into_account_truncating() + final mintingAccountBytes = Planck.url( + Uri.parse(rpcUrl), + ).constant.wormhole.mintingAccount; + return _accountIdToSs58(Uint8List.fromList(mintingAccountBytes)); + } + + /// Get the transfer count for a wormhole address. + Future _getTransferCount(String rpcUrl, String wormholeAddress) async { + // Query Wormhole::TransferCount storage + // Storage key: twox128("Wormhole") ++ twox128("TransferCount") ++ blake2_128_concat(address) + + final accountId = _ss58ToHex(wormholeAddress); + + // Build storage key for TransferCount + // Wormhole module prefix: twox128("Wormhole") + // Storage item: twox128("TransferCount") + // Key: blake2_128_concat(account_id) + final modulePrefix = _twox128('Wormhole'); + final storagePrefix = _twox128('TransferCount'); + final keyHash = _blake2128Concat(accountId); + + final storageKey = '0x$modulePrefix$storagePrefix$keyHash'; + + final response = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'state_getStorage', + 'params': [storageKey], + }), + ); + + final result = jsonDecode(response.body); + if (result['error'] != null) { + throw Exception('RPC error: ${result['error']}'); + } + + final value = result['result'] as String?; + if (value == null || value == '0x' || value.isEmpty) { + return 0; + } + + // Decode SCALE-encoded u64 + final bytes = _hexToBytes(value.substring(2)); + return _decodeU64(bytes); + } + + /// Check if a nullifier has been consumed. + Future _isNullifierConsumed(String rpcUrl, String nullifierHex) async { + // Query Wormhole::UsedNullifiers storage + final nullifierBytes = nullifierHex.startsWith('0x') + ? 
nullifierHex.substring(2) + : nullifierHex; + + final modulePrefix = _twox128('Wormhole'); + final storagePrefix = _twox128('UsedNullifiers'); + final keyHash = _blake2128Concat(nullifierBytes); + + final storageKey = '0x$modulePrefix$storagePrefix$keyHash'; + + final response = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'state_getStorage', + 'params': [storageKey], + }), + ); + + final result = jsonDecode(response.body); + if (result['error'] != null) { + throw Exception('RPC error: ${result['error']}'); + } + + // If storage exists and is true, nullifier is consumed + final value = result['result'] as String?; + return value != null && value != '0x' && value.isNotEmpty; + } + + /// Get transfer proof info from chain. + /// + /// For mining rewards, we use the chain's finalized block as the proof block + /// and estimate the amount based on the balance query. + /// + /// TODO: Implement proper event indexing or use Subsquid when available. + Future _getTransferProofInfo({ + required String rpcUrl, + required String wormholeAddress, + required String mintingAccount, + required BigInt transferCount, + }) async { + _log.d( + 'Getting transfer info for transfer $transferCount to $wormholeAddress', + ); + + // Get a recent finalized block to use as the proof block + final blockHash = await _getFinalizedBlockHash(rpcUrl); + if (blockHash == null) { + _log.e('Could not get finalized block hash'); + return null; + } + + // For mining rewards, we need to estimate the amount. + // Since we can't easily decode events, we'll query the balance and assume + // it's evenly distributed across transfers (this is a simplification). + // + // In practice, mining rewards vary per block based on remaining supply. + // A proper implementation would store transfer amounts when blocks are mined. 
+ final substrateService = SubstrateService(); + final totalBalance = await substrateService.queryBalanceRaw( + wormholeAddress, + ); + + // Get total transfer count + final totalTransfers = await _getTransferCount(rpcUrl, wormholeAddress); + + if (totalTransfers == 0) { + _log.w('No transfers found'); + return null; + } + + // Estimate amount per transfer (simplified - assumes equal distribution) + // This will likely fail for actual withdrawals because the amount must match exactly. + // For now, this is a placeholder that shows the flow works. + final estimatedAmount = totalBalance ~/ BigInt.from(totalTransfers); + + _log.i( + 'Estimated amount for transfer $transferCount: $estimatedAmount planck', + ); + _log.w( + 'NOTE: Amount estimation may not match actual transfer amount. ' + 'Proper implementation requires tracking transfer amounts when mined.', + ); + + return TransferInfo( + blockHash: blockHash, + transferCount: transferCount, + amount: estimatedAmount, + wormholeAddress: wormholeAddress, + fundingAccount: mintingAccount, + ); + } + + /// Get the finalized block hash. + Future _getFinalizedBlockHash(String rpcUrl) async { + final response = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'chain_getFinalizedHead', + 'params': [], + }), + ); + + final result = jsonDecode(response.body); + if (result['error'] != null) { + return null; + } + return result['result'] as String?; + } + + /// Select transfers to cover the target amount. 
+ List _selectTransfers( + List available, + BigInt targetAmount, + ) { + // Sort by amount descending (largest first) + final sorted = List.from(available) + ..sort((a, b) => b.amount.compareTo(a.amount)); + + final selected = []; + var total = BigInt.zero; + + for (final transfer in sorted) { + if (total >= targetAmount) break; + selected.add(transfer); + total += transfer.amount; + } + + return selected; + } + + /// Generate a ZK proof for a single transfer. + /// + /// [proofBlockHash] - The block hash to use for the storage proof. All proofs + /// in an aggregation batch MUST use the same block hash. This should be the + /// current best block, not the block where the transfer originally occurred. + /// + /// [outputAmount] - Optional override for output amount (quantized). If not provided, + /// uses the full amount after fee deduction. + /// + /// [changeAmount] - Optional change amount (quantized). If > 0, sends this amount + /// to [changeAddress]. + /// + /// [changeAddress] - Address to send change to (required if changeAmount > 0). + Future _generateProofForTransfer({ + required WormholeProofGenerator generator, + required WormholeService wormholeService, + required TransferInfo transfer, + required String secretHex, + required String destinationAddress, + required String rpcUrl, + required String proofBlockHash, + int? outputAmount, + int changeAmount = 0, + String? changeAddress, + }) async { + // Use the common proof block hash for storage proof (required by aggregation circuit) + final blockHash = proofBlockHash.startsWith('0x') + ? 
proofBlockHash + : '0x$proofBlockHash'; + + // Get block header for the proof block (not the original transfer block) + final blockHeader = await _fetchBlockHeader(rpcUrl, blockHash); + + // Get storage proof for this transfer at the proof block + final storageProof = await _fetchStorageProof( + rpcUrl: rpcUrl, + blockHash: blockHash, + transfer: transfer, + secretHex: secretHex, + ); + + // Quantize the amount for the circuit + final quantizedInputAmount = wormholeService.quantizeAmount( + transfer.amount, + ); + + // Compute the max output amount after fee deduction + // The circuit enforces: output <= input * (10000 - fee_bps) / 10000 + final maxOutputAmount = wormholeService.computeOutputAmount( + quantizedInputAmount, + feeBps, + ); + + // Use provided output amount or default to max + final quantizedOutputAmount = outputAmount ?? maxOutputAmount; + + // Validate that output + change doesn't exceed max + if (quantizedOutputAmount + changeAmount > maxOutputAmount) { + throw ArgumentError( + 'Output ($quantizedOutputAmount) + change ($changeAmount) exceeds max allowed ($maxOutputAmount)', + ); + } + + _log.i('=== Proof Generation Inputs ==='); + _log.i(' Transfer amount (planck): ${transfer.amount}'); + _log.i(' Quantized input amount: $quantizedInputAmount'); + _log.i(' Max output amount (after fee): $maxOutputAmount'); + _log.i(' Output amount: $quantizedOutputAmount'); + _log.i(' Change amount: $changeAmount'); + _log.i(' Transfer count: ${transfer.transferCount}'); + _log.i(' Block number: ${blockHeader.blockNumber}'); + _log.i(' Fee BPS: $feeBps'); + _log.i(' Digest length: ${blockHeader.digestHex.length} chars'); + _log.i(' Storage proof nodes: ${storageProof.proofNodesHex.length}'); + + // Create the UTXO + final fundingAccountHex = _ss58ToHex(transfer.fundingAccount); + final utxo = WormholeUtxo( + secretHex: secretHex, + amount: transfer.amount, + transferCount: transfer.transferCount, + fundingAccountHex: fundingAccountHex, + blockHashHex: blockHash, + 
); + + _log.i(' Funding account hex: $fundingAccountHex'); + _log.i(' Block hash: $blockHash'); + + // Create output assignment + final ProofOutput output; + if (changeAmount > 0 && changeAddress != null) { + output = ProofOutput.withChange( + amount: quantizedOutputAmount, + exitAccount: destinationAddress, + changeAmount: changeAmount, + changeAccount: changeAddress, + ); + _log.i(' Exit account: $destinationAddress'); + _log.i(' Change account: $changeAddress'); + } else { + output = ProofOutput.single( + amount: quantizedOutputAmount, + exitAccount: destinationAddress, + ); + _log.i(' Exit account: $destinationAddress'); + } + _log.i('==============================='); + + // Generate the proof + return await generator.generateProof( + utxo: utxo, + output: output, + feeBps: feeBps, + blockHeader: blockHeader, + storageProof: storageProof, + ); + } + + /// Fetch the current best (latest) block hash from the chain. + /// + /// All proofs in an aggregation batch must use the same block hash for their + /// storage proofs. This ensures all proofs reference the same chain state. + Future _fetchBestBlockHash(String rpcUrl) async { + final response = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'chain_getBlockHash', + 'params': [], // Empty params returns the best block hash + }), + ); + + if (response.statusCode != 200) { + throw Exception( + 'Failed to fetch best block hash: ${response.statusCode}', + ); + } + + final result = jsonDecode(response.body); + if (result['error'] != null) { + throw Exception('RPC error fetching best block hash: ${result['error']}'); + } + + final blockHash = result['result'] as String?; + if (blockHash == null) { + throw Exception('No best block hash returned from chain'); + } + + _log.d('Got best block hash: $blockHash'); + return blockHash; + } + + /// Fetch block header from RPC. 
+ Future _fetchBlockHeader(String rpcUrl, String blockHash) async { + final response = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'chain_getHeader', + 'params': [blockHash], + }), + ); + + if (response.statusCode != 200) { + throw Exception('Failed to fetch block header: ${response.statusCode}'); + } + + final result = jsonDecode(response.body); + if (result['error'] != null) { + throw Exception( + 'RPC error fetching header for $blockHash: ${result['error']}', + ); + } + + final header = result['result']; + if (header == null) { + throw Exception( + 'Block not found: $blockHash - the block may have been pruned or the chain was reset', + ); + } + + _log.d('Got block header: number=${header['number']}'); + + // Use SDK to properly encode digest from RPC logs + // This ensures correct SCALE encoding with proper padding to 110 bytes + final digestLogs = (header['digest']['logs'] as List? ?? []) + .cast() + .toList(); + final wormholeService = WormholeService(); + final digestHex = wormholeService.encodeDigestFromRpcLogs( + logsHex: digestLogs, + ); + + return BlockHeader( + parentHashHex: header['parentHash'] as String, + stateRootHex: header['stateRoot'] as String, + extrinsicsRootHex: header['extrinsicsRoot'] as String, + blockNumber: int.parse( + (header['number'] as String).substring(2), + radix: 16, + ), + digestHex: digestHex, + ); + } + + /// Fetch storage proof for a transfer. + /// + /// Uses the Poseidon-based storage key computation from the SDK to get + /// the correct storage key for the TransferProof entry. 
+ Future _fetchStorageProof({ + required String rpcUrl, + required String blockHash, + required TransferInfo transfer, + required String secretHex, + }) async { + _log.d('Fetching storage proof for transfer ${transfer.transferCount}'); + _log.d(' secretHex: ${secretHex.substring(0, 10)}...'); + _log.d(' transferCount: ${transfer.transferCount}'); + _log.d(' fundingAccount: ${transfer.fundingAccount}'); + _log.d(' amount: ${transfer.amount}'); + + // Compute the storage key using Poseidon hash (same as chain uses) + // The key includes: asset_id (0), transfer_count, from, to, amount + final wormholeService = WormholeService(); + final String storageKey; + try { + storageKey = wormholeService.computeTransferProofStorageKey( + secretHex: secretHex, + transferCount: transfer.transferCount, + fundingAccount: transfer.fundingAccount, + amount: transfer.amount, + ); + } catch (e) { + // Extract message from WormholeError if possible + final message = e is Exception ? e.toString() : 'Unknown error'; + _log.e('Failed to compute storage key: $message'); + // Try to get the message field if it's a WormholeError + final errorMessage = (e as dynamic).message?.toString() ?? 
e.toString(); + throw Exception('Failed to compute storage key: $errorMessage'); + } + + _log.d('Storage key: $storageKey'); + + // Fetch the read proof from chain + final response = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'state_getReadProof', + 'params': [ + [storageKey], + blockHash, + ], + }), + ); + + if (response.statusCode != 200) { + throw Exception('Failed to fetch storage proof: ${response.statusCode}'); + } + + final result = jsonDecode(response.body); + if (result['error'] != null) { + throw Exception('RPC error: ${result['error']}'); + } + + final proof = result['result']; + final proofNodes = (proof['proof'] as List) + .map((p) => p as String) + .toList(); + + if (proofNodes.isEmpty) { + throw Exception( + 'Empty storage proof - transfer may not exist at this block', + ); + } + + _log.d('Got ${proofNodes.length} proof nodes'); + + // Get state root from block header + final headerResponse = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'chain_getHeader', + 'params': [blockHash], + }), + ); + + final headerResult = jsonDecode(headerResponse.body); + if (headerResult['error'] != null) { + throw Exception('Failed to get block header: ${headerResult['error']}'); + } + + final stateRoot = headerResult['result']['stateRoot'] as String; + _log.d('State root: $stateRoot'); + + return StorageProof(proofNodesHex: proofNodes, stateRootHex: stateRoot); + } + + /// Submit aggregated proof to chain as an unsigned extrinsic. + /// + /// The Wormhole::verify_aggregated_proof call is designed to be submitted + /// unsigned - the proof itself provides cryptographic verification. + Future _submitProof({required String proofHex}) async { + _log.i('Proof length: ${proofHex.length} chars'); + + final proofBytes = _hexToBytes( + proofHex.startsWith('0x') ? 
proofHex.substring(2) : proofHex, + ); + + final call = RuntimeCall.values.wormhole( + wormhole_call.VerifyAggregatedProof(proofBytes: proofBytes), + ); + + final txHash = await SubstrateService().submitUnsignedExtrinsic(call); + final txHashHex = '0x${_bytesToHex(txHash)}'; + _log.i('Transaction submitted: $txHashHex'); + return txHashHex; + } + + /// Check events in a specific block for wormhole activity. + /// This is useful for debugging - call it with a known block hash. + Future debugBlockEvents(String rpcUrl, String blockHash) async { + _log.i('=== DEBUG: Checking events in block $blockHash ==='); + + final eventsKey = '0x${_twox128('System')}${_twox128('Events')}'; + final eventsResponse = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'state_getStorage', + 'params': [eventsKey, blockHash], + }), + ); + final eventsResult = jsonDecode(eventsResponse.body); + final eventsHex = eventsResult['result'] as String?; + + if (eventsHex == null) { + _log.w('No events found'); + return; + } + + _log.i('Events hex length: ${eventsHex.length}'); + _parseAndLogAllEvents(eventsHex); + } + + /// Parse and log all events in a block (for debugging). + void _parseAndLogAllEvents(String eventsHex) { + final bytes = _hexToBytes(eventsHex.substring(2)); + _log.d('Total events data: ${bytes.length} bytes'); + + // Scan for event patterns + for (var i = 0; i < bytes.length - 4; i++) { + // Look for ApplyExtrinsic phase (0x00) + if (bytes[i] == 0x00) { + final compactByte = bytes[i + 1]; + if (compactByte & 0x03 == 0) { + final extrinsicIdx = compactByte >> 2; + if (i + 3 < bytes.length) { + final palletIndex = bytes[i + 2]; + final eventIndex = bytes[i + 3]; + + // Log interesting events + String eventName = 'Pallet$palletIndex.Event$eventIndex'; + if (palletIndex == 0) { + eventName = eventIndex == 0 + ? 'System.ExtrinsicSuccess' + : eventIndex == 1 + ? 
'System.ExtrinsicFailed' + : eventName; + } else if (palletIndex == 20) { + eventName = 'Wormhole.Event$eventIndex'; + } else if (palletIndex == 4) { + eventName = eventIndex == 2 + ? 'Balances.Transfer' + : 'Balances.Event$eventIndex'; + } + + if (palletIndex == 0 || palletIndex == 20 || palletIndex == 4) { + _log.i(' [Ext $extrinsicIdx] $eventName'); + } + } + } + } + } + } + + /// Wait for a transaction to be included in a block and check events. + /// + /// Polls for new blocks and looks for wormhole extrinsics, then examines + /// the events to determine success or failure. + Future _waitForTransactionConfirmation({ + required String txHash, + required String rpcUrl, + required String destinationAddress, + required BigInt expectedAmount, + int maxAttempts = 30, + Duration pollInterval = const Duration(seconds: 2), + }) async { + print('=== WAITING FOR CONFIRMATION ==='); + print('TX Hash: $txHash'); + print('Destination: $destinationAddress'); + print('Expected amount: $expectedAmount'); + + String? startBlockHash; + int blocksChecked = 0; + + // Get starting block number for reference + try { + final response = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'chain_getHeader', + 'params': [], + }), + ); + final result = jsonDecode(response.body); + final blockNum = result['result']?['number'] as String?; + startBlockHash = result['result']?['parentHash'] as String?; + _log.i('Starting at block: $blockNum'); + } catch (e) { + _log.w('Could not get starting block: $e'); + } + + String? 
lastBlockHash = startBlockHash; + + for (var attempt = 0; attempt < maxAttempts; attempt++) { + await Future.delayed(pollInterval); + + try { + // Get latest block + final headerResponse = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'chain_getHeader', + 'params': [], + }), + ); + final headerResult = jsonDecode(headerResponse.body); + final header = headerResult['result']; + if (header == null) continue; + + final blockNumber = header['number'] as String?; + final parentHash = header['parentHash'] as String?; + + // Get block hash + final hashResponse = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'chain_getBlockHash', + 'params': [], + }), + ); + final hashResult = jsonDecode(hashResponse.body); + final currentBlockHash = hashResult['result'] as String?; + + if (currentBlockHash == null || currentBlockHash == lastBlockHash) { + continue; + } + + lastBlockHash = currentBlockHash; + blocksChecked++; + + print('--- Checking block $blockNumber ($currentBlockHash) ---'); + + // Check events in this block for wormhole activity + final eventsKey = '0x${_twox128('System')}${_twox128('Events')}'; + final eventsResponse = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'state_getStorage', + 'params': [eventsKey, currentBlockHash], + }), + ); + final eventsResult = jsonDecode(eventsResponse.body); + final eventsHex = eventsResult['result'] as String?; + + if (eventsHex == null) { + print(' (no events)'); + continue; + } + + // Look for wormhole events in this block (this also prints all events) + final wormholeResult = _checkForWormholeEvents(eventsHex); + + if (wormholeResult != null) { + print('=== WORMHOLE TX FOUND IN BLOCK $blockNumber ==='); + print('Block 
hash: $currentBlockHash'); + + if (wormholeResult['success'] == true) { + print('STATUS: SUCCESS'); + print('============================================='); + return true; + } else { + print('STATUS: FAILED'); + if (wormholeResult['error'] != null) { + print('Error: ${wormholeResult['error']}'); + } + print('============================================='); + return false; + } + } + } catch (e, st) { + print('Error checking block: $e'); + print('$st'); + } + } + + print('No wormhole transaction found after checking $blocksChecked blocks'); + print('The transaction may still be pending or may have been rejected.'); + return false; + } + + /// Wormhole error names (order from pallet Error enum, index 20) + static const _wormholeErrors = [ + 'InvalidProof', + 'ProofDeserializationFailed', + 'VerificationFailed', + 'InvalidPublicInputs', + 'NullifierAlreadyUsed', + 'VerifierNotAvailable', + 'InvalidStorageRoot', + 'StorageRootMismatch', + 'BlockNotFound', + 'InvalidBlockNumber', + 'AggregatedVerifierNotAvailable', + 'AggregatedProofDeserializationFailed', + 'AggregatedVerificationFailed', + 'InvalidAggregatedPublicInputs', + 'InvalidVolumeFeeRate', + 'TransferAmountBelowMinimum', + ]; + + /// Check events hex for wormhole withdrawal verification activity. + /// Returns null if no withdrawal verification found, or a map with success/failure info. + /// + /// We specifically look for: + /// - Wormhole.ProofVerified -> withdrawal succeeded + /// - System.ExtrinsicFailed (any non-inherent) -> withdrawal failed + /// + /// We ignore Wormhole.NativeTransferred as those are mining rewards, not withdrawals. + Map? _checkForWormholeEvents(String eventsHex) { + final bytes = _hexToBytes( + eventsHex.startsWith('0x') ? eventsHex.substring(2) : eventsHex, + ); + final input = scale.ByteInput(Uint8List.fromList(bytes)); + final allEvents = []; + bool? success; + String? error; + BigInt? 
exitAmount; + + print('=== DECODING EVENTS (${bytes.length} bytes) ==='); + + try { + // Decode Vec + final numEvents = scale.CompactCodec.codec.decode(input); + print('Block has $numEvents events'); + + for (var i = 0; i < numEvents; i++) { + try { + final eventRecord = EventRecord.decode(input); + final event = eventRecord.event; + final eventName = _getEventName(event); + allEvents.add(eventName); + print(' [$i] $eventName'); + + // Check for Wormhole.ProofVerified - this means withdrawal succeeded + if (event is runtime_event.Wormhole) { + final wormholeEvent = event.value0; + + if (wormholeEvent is wormhole_event.ProofVerified) { + success = true; + exitAmount = wormholeEvent.exitAmount; + print( + ' -> ProofVerified: exitAmount=${_formatAmount(exitAmount)}', + ); + } else if (wormholeEvent is wormhole_event.NativeTransferred) { + // Log but don't treat as withdrawal verification (these are mining rewards) + final toSs58 = _accountIdToSs58( + Uint8List.fromList(wormholeEvent.to), + ); + final fromSs58 = _accountIdToSs58( + Uint8List.fromList(wormholeEvent.from), + ); + print( + ' -> NativeTransferred: from=$fromSs58, to=$toSs58, amount=${_formatAmount(wormholeEvent.amount)}', + ); + } + } + + // Check for System.ExtrinsicFailed - capture any failure (could be our withdrawal tx) + if (event is runtime_event.System) { + final systemEvent = event.value0; + + if (systemEvent is system_event.ExtrinsicFailed) { + // Capture any ExtrinsicFailed as potential withdrawal failure + // The first ExtrinsicSuccess is usually the inherent, so ExtrinsicFailed + // at index > 0 is likely our submitted tx + if (i > 0) { + success = false; + error = _formatDispatchError(systemEvent.dispatchError); + print(' -> ExtrinsicFailed: $error'); + } + } + } + } catch (e) { + print(' [$i] Failed to decode event: $e'); + // Stop decoding on error - remaining events can't be reliably decoded + break; + } + } + } catch (e) { + print('Failed to decode events: $e'); + } + + 
print('=============================='); + + // Only return result if we found a withdrawal verification (success or failure) + if (success == null) return null; + + return { + 'success': success, + 'events': allEvents, + 'error': error, + 'exitAmount': exitAmount, + }; + } + + /// Format a DispatchError into a human-readable string. + String _formatDispatchError(dispatch_error.DispatchError err) { + if (err is dispatch_error.Module) { + final moduleError = err.value0; + final palletIndex = moduleError.index; + final errorIndex = moduleError.error.isNotEmpty + ? moduleError.error[0] + : 0; + + if (palletIndex == 20 && errorIndex < _wormholeErrors.length) { + return 'Wormhole.${_wormholeErrors[errorIndex]}'; + } + return 'Module(pallet=$palletIndex, error=$errorIndex)'; + } else if (err is dispatch_error.Token) { + return 'Token.${err.value0.toJson()}'; + } else if (err is dispatch_error.Arithmetic) { + return 'Arithmetic.${err.value0.toJson()}'; + } else if (err is dispatch_error.Transactional) { + return 'Transactional.${err.value0.toJson()}'; + } else if (err is dispatch_error.Other) { + return 'Other'; + } else if (err is dispatch_error.CannotLookup) { + return 'CannotLookup'; + } else if (err is dispatch_error.BadOrigin) { + return 'BadOrigin'; + } else if (err is dispatch_error.ConsumerRemaining) { + return 'ConsumerRemaining'; + } else if (err is dispatch_error.NoProviders) { + return 'NoProviders'; + } else if (err is dispatch_error.TooManyConsumers) { + return 'TooManyConsumers'; + } else if (err is dispatch_error.Exhausted) { + return 'Exhausted'; + } else if (err is dispatch_error.Corruption) { + return 'Corruption'; + } else if (err is dispatch_error.Unavailable) { + return 'Unavailable'; + } else if (err is dispatch_error.RootNotAllowed) { + return 'RootNotAllowed'; + } else { + return err.toJson().toString(); + } + } + + /// Get a human-readable name for a runtime event. 
+ String _getEventName(runtime_event.RuntimeEvent event) { + if (event is runtime_event.System) { + return 'System.${event.value0.runtimeType}'; + } else if (event is runtime_event.Wormhole) { + return 'Wormhole.${event.value0.runtimeType}'; + } else if (event is runtime_event.Balances) { + return 'Balances.${event.value0.runtimeType}'; + } else if (event is runtime_event.QPoW) { + return 'QPoW.${event.value0.runtimeType}'; + } else if (event is runtime_event.MiningRewards) { + return 'MiningRewards.${event.value0.runtimeType}'; + } else if (event is runtime_event.TransactionPayment) { + return 'TransactionPayment.${event.value0.runtimeType}'; + } else { + return event.runtimeType.toString(); + } + } + + /// Format amount for display (divide by 10^12 for UNIT). + String _formatAmount(BigInt amount) { + final units = amount ~/ BigInt.from(1000000000000); + final remainder = amount % BigInt.from(1000000000000); + return '$units.${remainder.toString().padLeft(12, '0').substring(0, 4)} UNIT'; + } + + /// Convert AccountId32 bytes to SS58 address with Quantus prefix (189). + String _accountIdToSs58(Uint8List accountId) { + const quantusPrefix = 189; + return ss58.Address(prefix: quantusPrefix, pubkey: accountId).encode(); + } + + /// Get the free balance of an account. 
+ Future _getBalance({ + required String rpcUrl, + required String address, + }) async { + // Decode SS58 address to account ID bytes (prefix-agnostic) + final decoded = ss58.Address.decode(address); + final accountIdHex = _bytesToHex(decoded.pubkey); + + // Build storage key for System.Account(accountId) + // twox128("System") ++ twox128("Account") ++ blake2_128_concat(accountId) + final systemHash = _twox128('System'); + final accountHash = _twox128('Account'); + final accountIdConcat = _blake2128Concat(decoded.pubkey); + + final storageKey = '0x$systemHash$accountHash$accountIdConcat'; + + final response = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'state_getStorage', + 'params': [storageKey], + }), + ); + + final result = jsonDecode(response.body); + + if (result['error'] != null) { + _log.e('Failed to get balance: ${result['error']}'); + return BigInt.zero; + } + + final storageData = result['result'] as String?; + if (storageData == null || storageData == '0x' || storageData.isEmpty) { + return BigInt.zero; + } + + // Decode AccountInfo struct + // Layout: nonce (u32) + consumers (u32) + providers (u32) + sufficients (u32) + AccountData + // AccountData: free (u128) + reserved (u128) + frozen (u128) + flags (u128) + final bytes = _hexToBytes(storageData.substring(2)); + if (bytes.length < 32) { + return BigInt.zero; + } + + // Skip nonce(4) + consumers(4) + providers(4) + sufficients(4) = 16 bytes + // Then read free balance (u128 = 16 bytes, little endian) + final freeBalanceBytes = bytes.sublist(16, 32); + var freeBalance = BigInt.zero; + for (var i = freeBalanceBytes.length - 1; i >= 0; i--) { + freeBalance = (freeBalance << 8) | BigInt.from(freeBalanceBytes[i]); + } + + return freeBalance; + } + + /// Convert bytes to hex string + String _bytesToHex(List bytes) { + return bytes.map((b) => b.toRadixString(16).padLeft(2, '0')).join(); + } + + // 
============================================================ + // Helper functions for storage key computation + // ============================================================ + + /// Compute twox128 hash of a string (for Substrate storage key prefixes). + String _twox128(String input) { + final bytes = Uint8List.fromList(utf8.encode(input)); + final hash = Hasher.twoxx128.hash(bytes); + return _bytesToHex(hash); + } + + /// Compute blake2b-128 hash concatenated with input (for Substrate storage keys). + /// Returns: blake2b_128(input) ++ input + String _blake2128Concat(dynamic input) { + final Uint8List bytes; + if (input is List) { + bytes = Uint8List.fromList(input); + } else if (input is String) { + // Assume hex string without 0x prefix + bytes = Uint8List.fromList(_hexToBytes(input)); + } else { + throw ArgumentError( + 'Expected List or hex String, got ${input.runtimeType}', + ); + } + + final hash = Hasher.blake2b128.hash(bytes); + return _bytesToHex(hash) + _bytesToHex(bytes); + } + + String _ss58ToHex(String ss58Address) { + // Convert SS58 address to hex account ID using ss58 package + // This properly handles the Quantus prefix (189) + final decoded = ss58.Address.decode(ss58Address); + final hex = + '0x${decoded.pubkey.map((b) => b.toRadixString(16).padLeft(2, '0')).join()}'; + _log.d('SS58 $ss58Address -> $hex'); + return hex; + } + + List _hexToBytes(String hex) { + final result = []; + for (var i = 0; i < hex.length; i += 2) { + result.add(int.parse(hex.substring(i, i + 2), radix: 16)); + } + return result; + } + + int _decodeU64(List bytes) { + // Little-endian u64 decoding + var result = 0; + for (var i = 0; i < bytes.length && i < 8; i++) { + result |= bytes[i] << (i * 8); + } + return result; + } +} diff --git a/miner-app/lib/src/services/wormhole_address_manager.dart b/miner-app/lib/src/services/wormhole_address_manager.dart new file mode 100644 index 00000000..ce33a78b --- /dev/null +++ 
b/miner-app/lib/src/services/wormhole_address_manager.dart @@ -0,0 +1,15 @@ +import 'package:quantus_miner/src/services/miner_mnemonic_provider.dart'; +import 'package:quantus_sdk/src/services/wormhole_address_manager.dart' as sdk; + +// Re-export SDK types for backward compatibility +export 'package:quantus_sdk/src/services/wormhole_address_manager.dart' + show WormholeAddressPurpose, TrackedWormholeAddress; + +/// Miner-app specific [WormholeAddressManager] that uses [MinerMnemonicProvider]. +/// +/// This is a convenience wrapper that creates an SDK [WormholeAddressManager] +/// pre-configured with the miner's mnemonic provider. +class WormholeAddressManager extends sdk.WormholeAddressManager { + /// Creates a new WormholeAddressManager using the miner's mnemonic. + WormholeAddressManager() : super(mnemonicProvider: MinerMnemonicProvider()); +} diff --git a/miner-app/lib/src/shared/extensions/snackbar_extensions.dart b/miner-app/lib/src/shared/extensions/snackbar_extensions.dart index 28667de2..faaf24b3 100644 --- a/miner-app/lib/src/shared/extensions/snackbar_extensions.dart +++ b/miner-app/lib/src/shared/extensions/snackbar_extensions.dart @@ -13,11 +13,17 @@ extension SnackbarExtensions on BuildContext { await sh.showCopySnackbar(this, title: title, message: message); } - Future showWarningSnackbar({required String title, required String message}) async { + Future showWarningSnackbar({ + required String title, + required String message, + }) async { await sh.showWarningSnackbar(this, title: title, message: message); } - Future showErrorSnackbar({required String title, required String message}) async { + Future showErrorSnackbar({ + required String title, + required String message, + }) async { await sh.showErrorSnackbar(this, title: title, message: message); } } diff --git a/miner-app/lib/src/ui/logs_widget.dart b/miner-app/lib/src/ui/logs_widget.dart index 8f40a6ea..e18280d1 100644 --- a/miner-app/lib/src/ui/logs_widget.dart +++ 
b/miner-app/lib/src/ui/logs_widget.dart @@ -20,7 +20,8 @@ class _LogsWidgetState extends State { final List _logs = []; StreamSubscription? _logsSubscription; final ScrollController _scrollController = ScrollController(); - bool _autoScroll = true; + bool _autoScroll = false; // Default to false so users can investigate logs + bool _isUserScrolling = false; @override void initState() { @@ -43,6 +44,11 @@ class _LogsWidgetState extends State { if (widget.orchestrator != null) { _logsSubscription = widget.orchestrator!.logsStream.listen((logEntry) { if (mounted) { + // Store scroll position before adding log + final wasAtBottom = _scrollController.hasClients && + _scrollController.position.pixels >= + _scrollController.position.maxScrollExtent - 50; + setState(() { _logs.add(logEntry); // Keep only the last maxLines entries @@ -51,8 +57,8 @@ class _LogsWidgetState extends State { } }); - // Auto-scroll to bottom if enabled - if (_autoScroll) { + // Auto-scroll to bottom if enabled and not user-scrolling + if (_autoScroll && !_isUserScrolling) { WidgetsBinding.instance.addPostFrameCallback((_) { _scrollToBottom(); }); @@ -64,11 +70,8 @@ class _LogsWidgetState extends State { void _scrollToBottom() { if (_scrollController.hasClients) { - _scrollController.animateTo( - _scrollController.position.maxScrollExtent, - duration: const Duration(milliseconds: 200), - curve: Curves.easeOut, - ); + // Use jumpTo instead of animateTo to prevent jittering + _scrollController.jumpTo(_scrollController.position.maxScrollExtent); } } @@ -114,18 +117,35 @@ class _LogsWidgetState extends State { padding: const EdgeInsets.all(8.0), decoration: BoxDecoration( color: Theme.of(context).primaryColor.useOpacity(0.1), - borderRadius: const BorderRadius.only(topLeft: Radius.circular(12), topRight: Radius.circular(12)), + borderRadius: const BorderRadius.only( + topLeft: Radius.circular(12), + topRight: Radius.circular(12), + ), ), child: Row( children: [ - const Text('Live Logs', style: 
TextStyle(fontSize: 16, fontWeight: FontWeight.bold)), + const Text( + 'Live Logs', + style: TextStyle(fontSize: 16, fontWeight: FontWeight.bold), + ), const Spacer(), IconButton( - icon: Icon(_autoScroll ? Icons.vertical_align_bottom : Icons.vertical_align_top, size: 20), + icon: Icon( + _autoScroll + ? Icons.vertical_align_bottom + : Icons.vertical_align_top, + size: 20, + ), onPressed: _toggleAutoScroll, - tooltip: _autoScroll ? 'Disable auto-scroll' : 'Enable auto-scroll', + tooltip: _autoScroll + ? 'Disable auto-scroll' + : 'Enable auto-scroll', + ), + IconButton( + icon: const Icon(Icons.clear, size: 20), + onPressed: _clearLogs, + tooltip: 'Clear logs', ), - IconButton(icon: const Icon(Icons.clear, size: 20), onPressed: _clearLogs, tooltip: 'Clear logs'), ], ), ), @@ -139,60 +159,98 @@ class _LogsWidgetState extends State { child: Text( 'No logs available\nStart the node to see live logs', textAlign: TextAlign.center, - style: TextStyle(color: Colors.grey, fontStyle: FontStyle.italic), + style: TextStyle( + color: Colors.grey, + fontStyle: FontStyle.italic, + ), ), ) - : ListView.builder( - controller: _scrollController, - itemCount: _logs.length, - itemBuilder: (context, index) { - final log = _logs[index]; - return Padding( - padding: const EdgeInsets.symmetric(vertical: 2.0), - child: Row( - crossAxisAlignment: CrossAxisAlignment.start, - children: [ - // Timestamp - SizedBox( - width: 80, - child: Text( - log.timestamp.toIso8601String().substring(11, 19), - style: TextStyle(fontSize: 12, color: Colors.grey[600], fontFamily: 'monospace'), - ), - ), + : NotificationListener( + onNotification: (notification) { + // Track when user is actively scrolling + if (notification is ScrollStartNotification) { + _isUserScrolling = true; + } else if (notification is ScrollEndNotification) { + _isUserScrolling = false; + // Check if user scrolled to bottom - re-enable auto-scroll + if (_scrollController.hasClients) { + final isAtBottom = 
_scrollController.position.pixels >= + _scrollController.position.maxScrollExtent - 50; + if (isAtBottom && !_autoScroll) { + // User scrolled to bottom, could re-enable auto-scroll + } + } + } + return false; + }, + child: SelectionArea( + child: ListView.builder( + controller: _scrollController, + itemCount: _logs.length, + itemBuilder: (context, index) { + final log = _logs[index]; + return Padding( + padding: const EdgeInsets.symmetric(vertical: 2.0), + child: Row( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + // Timestamp + SizedBox( + width: 80, + child: Text( + log.timestamp.toIso8601String().substring( + 11, + 19, + ), + style: TextStyle( + fontSize: 12, + color: Colors.grey[600], + fontFamily: 'monospace', + ), + ), + ), - // Source indicator - Container( - width: 12, - height: 12, - margin: const EdgeInsets.only(right: 8, top: 2), - decoration: BoxDecoration(color: _getLogColor(log.source), shape: BoxShape.circle), - ), + // Source indicator + Container( + width: 12, + height: 12, + margin: const EdgeInsets.only(right: 8, top: 2), + decoration: BoxDecoration( + color: _getLogColor(log.source), + shape: BoxShape.circle, + ), + ), - // Source label - SizedBox( - width: 100, - child: Text( - '[${log.source}]', - style: TextStyle( - fontSize: 12, - color: _getLogColor(log.source), - fontWeight: FontWeight.w500, + // Source label + SizedBox( + width: 100, + child: Text( + '[${log.source}]', + style: TextStyle( + fontSize: 12, + color: _getLogColor(log.source), + fontWeight: FontWeight.w500, + ), + ), ), - ), - ), - // Log message - Expanded( - child: SelectableText( - log.message, - style: const TextStyle(fontSize: 12, fontFamily: 'monospace', height: 1.2), - ), + // Log message + Expanded( + child: Text( + log.message, + style: const TextStyle( + fontSize: 12, + fontFamily: 'monospace', + height: 1.2, + ), + ), + ), + ], ), - ], - ), - ); - }, + ); + }, + ), + ), ), ), ), @@ -202,19 +260,32 @@ class _LogsWidgetState extends State { padding: 
const EdgeInsets.symmetric(horizontal: 8.0, vertical: 4.0), decoration: BoxDecoration( color: Theme.of(context).primaryColor.useOpacity(0.05), - borderRadius: const BorderRadius.only(bottomLeft: Radius.circular(12), bottomRight: Radius.circular(12)), + borderRadius: const BorderRadius.only( + bottomLeft: Radius.circular(12), + bottomRight: Radius.circular(12), + ), ), child: Row( mainAxisAlignment: MainAxisAlignment.spaceBetween, children: [ - Text('Total logs: ${_logs.length}', style: TextStyle(fontSize: 12, color: Colors.grey[600])), + Text( + 'Total logs: ${_logs.length}', + style: TextStyle(fontSize: 12, color: Colors.grey[600]), + ), if (widget.orchestrator?.isMining ?? false) Text( 'Live', - style: TextStyle(fontSize: 12, color: Colors.green, fontWeight: FontWeight.w500), + style: TextStyle( + fontSize: 12, + color: Colors.green, + fontWeight: FontWeight.w500, + ), ) else - Text('Not connected', style: TextStyle(fontSize: 12, color: Colors.grey)), + Text( + 'Not connected', + style: TextStyle(fontSize: 12, color: Colors.grey), + ), ], ), ), diff --git a/miner-app/lib/src/ui/snackbar_helper.dart b/miner-app/lib/src/ui/snackbar_helper.dart index fa971978..942355b1 100644 --- a/miner-app/lib/src/ui/snackbar_helper.dart +++ b/miner-app/lib/src/ui/snackbar_helper.dart @@ -41,11 +41,19 @@ Future showTopSnackBar( ); } -Future showCopySnackbar(BuildContext context, {required String title, required String message}) async { +Future showCopySnackbar( + BuildContext context, { + required String title, + required String message, +}) async { await showTopSnackBar(context, title: title, message: message); } -Future showWarningSnackbar(BuildContext context, {required String title, required String message}) async { +Future showWarningSnackbar( + BuildContext context, { + required String title, + required String message, +}) async { await showTopSnackBar( context, title: title, @@ -54,7 +62,11 @@ Future showWarningSnackbar(BuildContext context, {required String title, r ); } 
-Future showErrorSnackbar(BuildContext context, {required String title, required String message}) async { +Future showErrorSnackbar( + BuildContext context, { + required String title, + required String message, +}) async { await showTopSnackBar( context, title: title, diff --git a/miner-app/lib/src/ui/top_snackbar_content.dart b/miner-app/lib/src/ui/top_snackbar_content.dart index 98510740..fa9a4e8e 100644 --- a/miner-app/lib/src/ui/top_snackbar_content.dart +++ b/miner-app/lib/src/ui/top_snackbar_content.dart @@ -7,7 +7,12 @@ class TopSnackBarContent extends StatelessWidget { final String message; final Icon? icon; - const TopSnackBarContent({super.key, required this.title, required this.message, this.icon}); + const TopSnackBarContent({ + super.key, + required this.title, + required this.message, + this.icon, + }); @override Widget build(BuildContext context) { @@ -20,7 +25,11 @@ class TopSnackBarContent extends StatelessWidget { shape: OvalBorder(), // Use OvalBorder for circle ), alignment: Alignment.center, - child: Icon(icon?.icon ?? Icons.check, color: icon?.color ?? Colors.white, size: 24), // Default check icon + child: Icon( + icon?.icon ?? Icons.check, + color: icon?.color ?? 
Colors.white, + size: 24, + ), // Default check icon ); return Container( @@ -33,7 +42,9 @@ class TopSnackBarContent extends StatelessWidget { side: BorderSide(color: Colors.white.useOpacity(0.1), width: 1), ), // Optional shadow for better visibility - shadows: const [BoxShadow(color: Colors.black26, blurRadius: 4, offset: Offset(0, 2))], + shadows: const [ + BoxShadow(color: Colors.black26, blurRadius: 4, offset: Offset(0, 2)), + ], ), child: Row( mainAxisAlignment: MainAxisAlignment.start, diff --git a/miner-app/lib/src/ui/update_banner.dart b/miner-app/lib/src/ui/update_banner.dart index 3db69c67..837ac867 100644 --- a/miner-app/lib/src/ui/update_banner.dart +++ b/miner-app/lib/src/ui/update_banner.dart @@ -29,7 +29,13 @@ class UpdateBanner extends StatelessWidget { width: double.infinity, decoration: BoxDecoration( color: backgroundColor ?? Colors.blue.shade500, - boxShadow: [BoxShadow(color: Colors.black.useOpacity(0.1), blurRadius: 4, offset: const Offset(0, 2))], + boxShadow: [ + BoxShadow( + color: Colors.black.useOpacity(0.1), + blurRadius: 4, + offset: const Offset(0, 2), + ), + ], ), child: SafeArea( bottom: false, @@ -37,7 +43,11 @@ class UpdateBanner extends StatelessWidget { padding: const EdgeInsets.symmetric(horizontal: 16, vertical: 12), child: Row( children: [ - Icon(icon ?? Icons.download, color: textColor ?? Colors.white, size: 24), + Icon( + icon ?? Icons.download, + color: textColor ?? Colors.white, + size: 24, + ), const SizedBox(width: 12), Expanded( child: Column( @@ -46,35 +56,57 @@ class UpdateBanner extends StatelessWidget { children: [ Text( message, - style: TextStyle(color: textColor ?? Colors.white, fontSize: 14, fontWeight: FontWeight.w600), + style: TextStyle( + color: textColor ?? Colors.white, + fontSize: 14, + fontWeight: FontWeight.w600, + ), ), const SizedBox(height: 2), Text( 'Version $version', - style: TextStyle(color: (textColor ?? Colors.white).useOpacity(0.9), fontSize: 12), + style: TextStyle( + color: (textColor ?? 
Colors.white).useOpacity(0.9), + fontSize: 12, + ), ), ], ), ), const SizedBox(width: 8), if (updateProgress != null) - SizedBox(width: 100, child: LinearProgressIndicator(value: updateProgress)) + SizedBox( + width: 100, + child: LinearProgressIndicator(value: updateProgress), + ) else ElevatedButton( onPressed: onUpdate, style: ElevatedButton.styleFrom( backgroundColor: Colors.white, foregroundColor: Colors.black, - padding: const EdgeInsets.symmetric(horizontal: 16, vertical: 8), - shape: RoundedRectangleBorder(borderRadius: BorderRadius.circular(20)), + padding: const EdgeInsets.symmetric( + horizontal: 16, + vertical: 8, + ), + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(20), + ), + ), + child: const Text( + 'Update', + style: TextStyle(fontWeight: FontWeight.bold), ), - child: const Text('Update', style: TextStyle(fontWeight: FontWeight.bold)), ), if (onDismiss != null && updateProgress == null) ...[ const SizedBox(width: 8), IconButton( onPressed: onDismiss, - icon: Icon(Icons.close, color: textColor ?? Colors.white, size: 20), + icon: Icon( + Icons.close, + color: textColor ?? Colors.white, + size: 20, + ), padding: EdgeInsets.zero, constraints: const BoxConstraints(), ), diff --git a/miner-app/lib/src/utils/app_logger.dart b/miner-app/lib/src/utils/app_logger.dart index 276cc602..54c1bc3b 100644 --- a/miner-app/lib/src/utils/app_logger.dart +++ b/miner-app/lib/src/utils/app_logger.dart @@ -106,27 +106,87 @@ class TaggedLoggerWrapper { TaggedLoggerWrapper(this._logger, this._tag); - void t(dynamic message, {DateTime? time, Object? error, StackTrace? stackTrace}) { - _logger.t('[$_tag] $message', time: time, error: error, stackTrace: stackTrace); + void t( + dynamic message, { + DateTime? time, + Object? error, + StackTrace? stackTrace, + }) { + _logger.t( + '[$_tag] $message', + time: time, + error: error, + stackTrace: stackTrace, + ); } - void d(dynamic message, {DateTime? time, Object? error, StackTrace? 
stackTrace}) { - _logger.d('[$_tag] $message', time: time, error: error, stackTrace: stackTrace); + void d( + dynamic message, { + DateTime? time, + Object? error, + StackTrace? stackTrace, + }) { + _logger.d( + '[$_tag] $message', + time: time, + error: error, + stackTrace: stackTrace, + ); } - void i(dynamic message, {DateTime? time, Object? error, StackTrace? stackTrace}) { - _logger.i('[$_tag] $message', time: time, error: error, stackTrace: stackTrace); + void i( + dynamic message, { + DateTime? time, + Object? error, + StackTrace? stackTrace, + }) { + _logger.i( + '[$_tag] $message', + time: time, + error: error, + stackTrace: stackTrace, + ); } - void w(dynamic message, {DateTime? time, Object? error, StackTrace? stackTrace}) { - _logger.w('[$_tag] $message', time: time, error: error, stackTrace: stackTrace); + void w( + dynamic message, { + DateTime? time, + Object? error, + StackTrace? stackTrace, + }) { + _logger.w( + '[$_tag] $message', + time: time, + error: error, + stackTrace: stackTrace, + ); } - void e(dynamic message, {DateTime? time, Object? error, StackTrace? stackTrace}) { - _logger.e('[$_tag] $message', time: time, error: error, stackTrace: stackTrace); + void e( + dynamic message, { + DateTime? time, + Object? error, + StackTrace? stackTrace, + }) { + _logger.e( + '[$_tag] $message', + time: time, + error: error, + stackTrace: stackTrace, + ); } - void f(dynamic message, {DateTime? time, Object? error, StackTrace? stackTrace}) { - _logger.f('[$_tag] $message', time: time, error: error, stackTrace: stackTrace); + void f( + dynamic message, { + DateTime? time, + Object? error, + StackTrace? 
stackTrace, + }) { + _logger.f( + '[$_tag] $message', + time: time, + error: error, + stackTrace: stackTrace, + ); } } diff --git a/miner-app/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/miner-app/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme index 056198c2..3a40d451 100644 --- a/miner-app/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme +++ b/miner-app/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -51,7 +51,7 @@ =3.10.0 <4.0.0" - flutter: ">=3.38.0" + dart: ">=3.9.0 <4.0.0" + flutter: ">=3.35.0" diff --git a/quantus_sdk/.gitignore b/quantus_sdk/.gitignore new file mode 100644 index 00000000..242b6132 --- /dev/null +++ b/quantus_sdk/.gitignore @@ -0,0 +1 @@ +assets/circuits/*.bin diff --git a/quantus_sdk/assets/circuits/config.json b/quantus_sdk/assets/circuits/config.json new file mode 100644 index 00000000..cd5864ce --- /dev/null +++ b/quantus_sdk/assets/circuits/config.json @@ -0,0 +1,11 @@ +{ + "num_leaf_proofs": 16, + "hashes": { + "common": "672689a87e8ed780337c0752ebc7fd1db6a63611fbd59b4ad0cbe4a4d97edcf2", + "verifier": "bb017485b12fb9c6d0b5c3db8b68f417bd3f75b2d5f3a2ea5fe12b6244233372", + "prover": "78c114c7290b04bac00551a590fd652f98194653b10ac4e11b0c0ddd5c7c0976", + "aggregated_common": "af4461081f6fb527d2b9ffb74479a133ed8b92cdd3554b46adc481a0dfc38b5d", + "aggregated_verifier": "90350437c8e0e2144ca849623ea0b58edd2decd7bdf6b728b32e1aa9d8f1e337", + "dummy_proof": "ff80d9291a846edd5ef62c1908653f0d421534ce6b579bbbda4ed5093a17c4f3" + } +} \ No newline at end of file diff --git a/quantus_sdk/lib/generated/planck/pallets/assets.dart b/quantus_sdk/lib/generated/planck/pallets/assets.dart new file mode 100644 index 00000000..b6e99c2a --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/assets.dart @@ -0,0 +1,876 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i8; +import 'dart:typed_data' as _i9; + +import 'package:polkadart/polkadart.dart' as _i1; 
+import 'package:polkadart/scale_codec.dart' as _i3; + +import '../types/pallet_assets/pallet/call.dart' as _i12; +import '../types/pallet_assets/types/approval.dart' as _i6; +import '../types/pallet_assets/types/asset_account.dart' as _i5; +import '../types/pallet_assets/types/asset_details.dart' as _i2; +import '../types/pallet_assets/types/asset_metadata.dart' as _i7; +import '../types/quantus_runtime/runtime_call.dart' as _i10; +import '../types/sp_core/crypto/account_id32.dart' as _i4; +import '../types/sp_runtime/multiaddress/multi_address.dart' as _i11; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageMap _asset = const _i1.StorageMap( + prefix: 'Assets', + storage: 'Asset', + valueCodec: _i2.AssetDetails.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i3.U32Codec.codec), + ); + + final _i1.StorageDoubleMap _account = + const _i1.StorageDoubleMap( + prefix: 'Assets', + storage: 'Account', + valueCodec: _i5.AssetAccount.codec, + hasher1: _i1.StorageHasher.blake2b128Concat(_i3.U32Codec.codec), + hasher2: _i1.StorageHasher.blake2b128Concat(_i4.AccountId32Codec()), + ); + + final _i1.StorageTripleMap _approvals = + const _i1.StorageTripleMap( + prefix: 'Assets', + storage: 'Approvals', + valueCodec: _i6.Approval.codec, + hasher1: _i1.StorageHasher.blake2b128Concat(_i3.U32Codec.codec), + hasher2: _i1.StorageHasher.blake2b128Concat(_i4.AccountId32Codec()), + hasher3: _i1.StorageHasher.blake2b128Concat(_i4.AccountId32Codec()), + ); + + final _i1.StorageMap _metadata = const _i1.StorageMap( + prefix: 'Assets', + storage: 'Metadata', + valueCodec: _i7.AssetMetadata.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i3.U32Codec.codec), + ); + + final _i1.StorageValue _nextAssetId = const _i1.StorageValue( + prefix: 'Assets', + storage: 'NextAssetId', + valueCodec: _i3.U32Codec.codec, + ); + + /// Details of an asset. + _i8.Future<_i2.AssetDetails?> asset(int key1, {_i1.BlockHash? 
at}) async { + final hashedKey = _asset.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _asset.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The holdings of a specific account for a specific asset. + _i8.Future<_i5.AssetAccount?> account(int key1, _i4.AccountId32 key2, {_i1.BlockHash? at}) async { + final hashedKey = _account.hashedKeyFor(key1, key2); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _account.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Approved balance transfers. First balance is the amount approved for transfer. Second + /// is the amount of `T::Currency` reserved for storing this. + /// First key is the asset ID, second key is the owner and third key is the delegate. + _i8.Future<_i6.Approval?> approvals(int key1, _i4.AccountId32 key2, _i4.AccountId32 key3, {_i1.BlockHash? at}) async { + final hashedKey = _approvals.hashedKeyFor(key1, key2, key3); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _approvals.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Metadata of an asset. + _i8.Future<_i7.AssetMetadata> metadata(int key1, {_i1.BlockHash? at}) async { + final hashedKey = _metadata.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _metadata.decodeValue(bytes); + } + return _i7.AssetMetadata( + deposit: BigInt.zero, + name: List.filled(0, 0, growable: true), + symbol: List.filled(0, 0, growable: true), + decimals: 0, + isFrozen: false, + ); /* Default */ + } + + /// The asset ID enforced for the next asset creation, if any present. Otherwise, this storage + /// item has no effect. + /// + /// This can be useful for setting up constraints for IDs of the new assets. 
For example, by + /// providing an initial [`NextAssetId`] and using the [`crate::AutoIncAssetId`] callback, an + /// auto-increment model can be applied to all new asset IDs. + /// + /// The initial next asset ID can be set using the [`GenesisConfig`] or the + /// [SetNextAssetId](`migration::next_asset_id::SetNextAssetId`) migration. + _i8.Future nextAssetId({_i1.BlockHash? at}) async { + final hashedKey = _nextAssetId.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _nextAssetId.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Details of an asset. + _i8.Future> multiAsset(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _asset.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _asset.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Metadata of an asset. + _i8.Future> multiMetadata(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _metadata.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _metadata.decodeValue(v.key)).toList(); + } + return (keys + .map( + (key) => _i7.AssetMetadata( + deposit: BigInt.zero, + name: List.filled(0, 0, growable: true), + symbol: List.filled(0, 0, growable: true), + decimals: 0, + isFrozen: false, + ), + ) + .toList() + as List<_i7.AssetMetadata>); /* Default */ + } + + /// Returns the storage key for `asset`. + _i9.Uint8List assetKey(int key1) { + final hashedKey = _asset.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `account`. + _i9.Uint8List accountKey(int key1, _i4.AccountId32 key2) { + final hashedKey = _account.hashedKeyFor(key1, key2); + return hashedKey; + } + + /// Returns the storage key for `approvals`. 
+ _i9.Uint8List approvalsKey(int key1, _i4.AccountId32 key2, _i4.AccountId32 key3) { + final hashedKey = _approvals.hashedKeyFor(key1, key2, key3); + return hashedKey; + } + + /// Returns the storage key for `metadata`. + _i9.Uint8List metadataKey(int key1) { + final hashedKey = _metadata.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `nextAssetId`. + _i9.Uint8List nextAssetIdKey() { + final hashedKey = _nextAssetId.hashedKey(); + return hashedKey; + } + + /// Returns the storage map key prefix for `asset`. + _i9.Uint8List assetMapPrefix() { + final hashedKey = _asset.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `account`. + _i9.Uint8List accountMapPrefix(int key1) { + final hashedKey = _account.mapPrefix(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `metadata`. + _i9.Uint8List metadataMapPrefix() { + final hashedKey = _metadata.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Issue a new class of fungible assets from a public origin. + /// + /// This new asset class has no assets initially and its owner is the origin. + /// + /// The origin must conform to the configured `CreateOrigin` and have sufficient funds free. + /// + /// Funds of sender are reserved by `AssetDeposit`. + /// + /// Parameters: + /// - `id`: The identifier of the new asset. This must not be currently in use to identify + /// an existing asset. If [`NextAssetId`] is set, then this must be equal to it. + /// - `admin`: The admin of this class of assets. The admin is the initial address of each + /// member of the asset class's admin team. + /// - `min_balance`: The minimum balance of this new asset that any single account must + /// have. If an account's balance is reduced below this, then it collapses to zero. + /// + /// Emits `Created` event when successful. 
+ /// + /// Weight: `O(1)` + _i10.Assets create({required BigInt id, required _i11.MultiAddress admin, required BigInt minBalance}) { + return _i10.Assets(_i12.Create(id: id, admin: admin, minBalance: minBalance)); + } + + /// Issue a new class of fungible assets from a privileged origin. + /// + /// This new asset class has no assets initially. + /// + /// The origin must conform to `ForceOrigin`. + /// + /// Unlike `create`, no funds are reserved. + /// + /// - `id`: The identifier of the new asset. This must not be currently in use to identify + /// an existing asset. If [`NextAssetId`] is set, then this must be equal to it. + /// - `owner`: The owner of this class of assets. The owner has full superuser permissions + /// over this asset, but may later change and configure the permissions using + /// `transfer_ownership` and `set_team`. + /// - `min_balance`: The minimum balance of this new asset that any single account must + /// have. If an account's balance is reduced below this, then it collapses to zero. + /// + /// Emits `ForceCreated` event when successful. + /// + /// Weight: `O(1)` + _i10.Assets forceCreate({ + required BigInt id, + required _i11.MultiAddress owner, + required bool isSufficient, + required BigInt minBalance, + }) { + return _i10.Assets(_i12.ForceCreate(id: id, owner: owner, isSufficient: isSufficient, minBalance: minBalance)); + } + + /// Start the process of destroying a fungible asset class. + /// + /// `start_destroy` is the first in a series of extrinsics that should be called, to allow + /// destruction of an asset class. + /// + /// The origin must conform to `ForceOrigin` or must be `Signed` by the asset's `owner`. + /// + /// - `id`: The identifier of the asset to be destroyed. This must identify an existing + /// asset. + /// + /// It will fail with either [`Error::ContainsHolds`] or [`Error::ContainsFreezes`] if + /// an account contains holds or freezes in place. 
+ _i10.Assets startDestroy({required BigInt id}) { + return _i10.Assets(_i12.StartDestroy(id: id)); + } + + /// Destroy all accounts associated with a given asset. + /// + /// `destroy_accounts` should only be called after `start_destroy` has been called, and the + /// asset is in a `Destroying` state. + /// + /// Due to weight restrictions, this function may need to be called multiple times to fully + /// destroy all accounts. It will destroy `RemoveItemsLimit` accounts at a time. + /// + /// - `id`: The identifier of the asset to be destroyed. This must identify an existing + /// asset. + /// + /// Each call emits the `Event::DestroyedAccounts` event. + _i10.Assets destroyAccounts({required BigInt id}) { + return _i10.Assets(_i12.DestroyAccounts(id: id)); + } + + /// Destroy all approvals associated with a given asset up to the max (T::RemoveItemsLimit). + /// + /// `destroy_approvals` should only be called after `start_destroy` has been called, and the + /// asset is in a `Destroying` state. + /// + /// Due to weight restrictions, this function may need to be called multiple times to fully + /// destroy all approvals. It will destroy `RemoveItemsLimit` approvals at a time. + /// + /// - `id`: The identifier of the asset to be destroyed. This must identify an existing + /// asset. + /// + /// Each call emits the `Event::DestroyedApprovals` event. + _i10.Assets destroyApprovals({required BigInt id}) { + return _i10.Assets(_i12.DestroyApprovals(id: id)); + } + + /// Complete destroying asset and unreserve currency. + /// + /// `finish_destroy` should only be called after `start_destroy` has been called, and the + /// asset is in a `Destroying` state. All accounts or approvals should be destroyed before + /// hand. + /// + /// - `id`: The identifier of the asset to be destroyed. This must identify an existing + /// asset. + /// + /// Each successful call emits the `Event::Destroyed` event. 
+ _i10.Assets finishDestroy({required BigInt id}) { + return _i10.Assets(_i12.FinishDestroy(id: id)); + } + + /// Mint assets of a particular class. + /// + /// The origin must be Signed and the sender must be the Issuer of the asset `id`. + /// + /// - `id`: The identifier of the asset to have some amount minted. + /// - `beneficiary`: The account to be credited with the minted assets. + /// - `amount`: The amount of the asset to be minted. + /// + /// Emits `Issued` event when successful. + /// + /// Weight: `O(1)` + /// Modes: Pre-existing balance of `beneficiary`; Account pre-existence of `beneficiary`. + _i10.Assets mint({required BigInt id, required _i11.MultiAddress beneficiary, required BigInt amount}) { + return _i10.Assets(_i12.Mint(id: id, beneficiary: beneficiary, amount: amount)); + } + + /// Reduce the balance of `who` by as much as possible up to `amount` assets of `id`. + /// + /// Origin must be Signed and the sender should be the Manager of the asset `id`. + /// + /// Bails with `NoAccount` if the `who` is already dead. + /// + /// - `id`: The identifier of the asset to have some amount burned. + /// - `who`: The account to be debited from. + /// - `amount`: The maximum amount by which `who`'s balance should be reduced. + /// + /// Emits `Burned` with the actual amount burned. If this takes the balance to below the + /// minimum for the asset, then the amount burned is increased to take it to zero. + /// + /// Weight: `O(1)` + /// Modes: Post-existence of `who`; Pre & post Zombie-status of `who`. + _i10.Assets burn({required BigInt id, required _i11.MultiAddress who, required BigInt amount}) { + return _i10.Assets(_i12.Burn(id: id, who: who, amount: amount)); + } + + /// Move some assets from the sender account to another. + /// + /// Origin must be Signed. + /// + /// - `id`: The identifier of the asset to have some amount transferred. + /// - `target`: The account to be credited. 
+ /// - `amount`: The amount by which the sender's balance of assets should be reduced and + /// `target`'s balance increased. The amount actually transferred may be slightly greater in + /// the case that the transfer would otherwise take the sender balance above zero but below + /// the minimum balance. Must be greater than zero. + /// + /// Emits `Transferred` with the actual amount transferred. If this takes the source balance + /// to below the minimum for the asset, then the amount transferred is increased to take it + /// to zero. + /// + /// Weight: `O(1)` + /// Modes: Pre-existence of `target`; Post-existence of sender; Account pre-existence of + /// `target`. + _i10.Assets transfer({required BigInt id, required _i11.MultiAddress target, required BigInt amount}) { + return _i10.Assets(_i12.Transfer(id: id, target: target, amount: amount)); + } + + /// Move some assets from the sender account to another, keeping the sender account alive. + /// + /// Origin must be Signed. + /// + /// - `id`: The identifier of the asset to have some amount transferred. + /// - `target`: The account to be credited. + /// - `amount`: The amount by which the sender's balance of assets should be reduced and + /// `target`'s balance increased. The amount actually transferred may be slightly greater in + /// the case that the transfer would otherwise take the sender balance above zero but below + /// the minimum balance. Must be greater than zero. + /// + /// Emits `Transferred` with the actual amount transferred. If this takes the source balance + /// to below the minimum for the asset, then the amount transferred is increased to take it + /// to zero. + /// + /// Weight: `O(1)` + /// Modes: Pre-existence of `target`; Post-existence of sender; Account pre-existence of + /// `target`. 
+ _i10.Assets transferKeepAlive({required BigInt id, required _i11.MultiAddress target, required BigInt amount}) { + return _i10.Assets(_i12.TransferKeepAlive(id: id, target: target, amount: amount)); + } + + /// Move some assets from one account to another. + /// + /// Origin must be Signed and the sender should be the Admin of the asset `id`. + /// + /// - `id`: The identifier of the asset to have some amount transferred. + /// - `source`: The account to be debited. + /// - `dest`: The account to be credited. + /// - `amount`: The amount by which the `source`'s balance of assets should be reduced and + /// `dest`'s balance increased. The amount actually transferred may be slightly greater in + /// the case that the transfer would otherwise take the `source` balance above zero but + /// below the minimum balance. Must be greater than zero. + /// + /// Emits `Transferred` with the actual amount transferred. If this takes the source balance + /// to below the minimum for the asset, then the amount transferred is increased to take it + /// to zero. + /// + /// Weight: `O(1)` + /// Modes: Pre-existence of `dest`; Post-existence of `source`; Account pre-existence of + /// `dest`. + _i10.Assets forceTransfer({ + required BigInt id, + required _i11.MultiAddress source, + required _i11.MultiAddress dest, + required BigInt amount, + }) { + return _i10.Assets(_i12.ForceTransfer(id: id, source: source, dest: dest, amount: amount)); + } + + /// Disallow further unprivileged transfers of an asset `id` from an account `who`. `who` + /// must already exist as an entry in `Account`s of the asset. If you want to freeze an + /// account that does not have an entry, use `touch_other` first. + /// + /// Origin must be Signed and the sender should be the Freezer of the asset `id`. + /// + /// - `id`: The identifier of the asset to be frozen. + /// - `who`: The account to be frozen. + /// + /// Emits `Frozen`. 
+ /// + /// Weight: `O(1)` + _i10.Assets freeze({required BigInt id, required _i11.MultiAddress who}) { + return _i10.Assets(_i12.Freeze(id: id, who: who)); + } + + /// Allow unprivileged transfers to and from an account again. + /// + /// Origin must be Signed and the sender should be the Admin of the asset `id`. + /// + /// - `id`: The identifier of the asset to be frozen. + /// - `who`: The account to be unfrozen. + /// + /// Emits `Thawed`. + /// + /// Weight: `O(1)` + _i10.Assets thaw({required BigInt id, required _i11.MultiAddress who}) { + return _i10.Assets(_i12.Thaw(id: id, who: who)); + } + + /// Disallow further unprivileged transfers for the asset class. + /// + /// Origin must be Signed and the sender should be the Freezer of the asset `id`. + /// + /// - `id`: The identifier of the asset to be frozen. + /// + /// Emits `Frozen`. + /// + /// Weight: `O(1)` + _i10.Assets freezeAsset({required BigInt id}) { + return _i10.Assets(_i12.FreezeAsset(id: id)); + } + + /// Allow unprivileged transfers for the asset again. + /// + /// Origin must be Signed and the sender should be the Admin of the asset `id`. + /// + /// - `id`: The identifier of the asset to be thawed. + /// + /// Emits `Thawed`. + /// + /// Weight: `O(1)` + _i10.Assets thawAsset({required BigInt id}) { + return _i10.Assets(_i12.ThawAsset(id: id)); + } + + /// Change the Owner of an asset. + /// + /// Origin must be Signed and the sender should be the Owner of the asset `id`. + /// + /// - `id`: The identifier of the asset. + /// - `owner`: The new Owner of this asset. + /// + /// Emits `OwnerChanged`. + /// + /// Weight: `O(1)` + _i10.Assets transferOwnership({required BigInt id, required _i11.MultiAddress owner}) { + return _i10.Assets(_i12.TransferOwnership(id: id, owner: owner)); + } + + /// Change the Issuer, Admin and Freezer of an asset. + /// + /// Origin must be Signed and the sender should be the Owner of the asset `id`. + /// + /// - `id`: The identifier of the asset to be frozen. 
+ /// - `issuer`: The new Issuer of this asset. + /// - `admin`: The new Admin of this asset. + /// - `freezer`: The new Freezer of this asset. + /// + /// Emits `TeamChanged`. + /// + /// Weight: `O(1)` + _i10.Assets setTeam({ + required BigInt id, + required _i11.MultiAddress issuer, + required _i11.MultiAddress admin, + required _i11.MultiAddress freezer, + }) { + return _i10.Assets(_i12.SetTeam(id: id, issuer: issuer, admin: admin, freezer: freezer)); + } + + /// Set the metadata for an asset. + /// + /// Origin must be Signed and the sender should be the Owner of the asset `id`. + /// + /// Funds of sender are reserved according to the formula: + /// `MetadataDepositBase + MetadataDepositPerByte * (name.len + symbol.len)` taking into + /// account any already reserved funds. + /// + /// - `id`: The identifier of the asset to update. + /// - `name`: The user friendly name of this asset. Limited in length by `StringLimit`. + /// - `symbol`: The exchange symbol for this asset. Limited in length by `StringLimit`. + /// - `decimals`: The number of decimals this asset uses to represent one unit. + /// + /// Emits `MetadataSet`. + /// + /// Weight: `O(1)` + _i10.Assets setMetadata({ + required BigInt id, + required List name, + required List symbol, + required int decimals, + }) { + return _i10.Assets(_i12.SetMetadata(id: id, name: name, symbol: symbol, decimals: decimals)); + } + + /// Clear the metadata for an asset. + /// + /// Origin must be Signed and the sender should be the Owner of the asset `id`. + /// + /// Any deposit is freed for the asset owner. + /// + /// - `id`: The identifier of the asset to clear. + /// + /// Emits `MetadataCleared`. + /// + /// Weight: `O(1)` + _i10.Assets clearMetadata({required BigInt id}) { + return _i10.Assets(_i12.ClearMetadata(id: id)); + } + + /// Force the metadata for an asset to some value. + /// + /// Origin must be ForceOrigin. + /// + /// Any deposit is left alone. 
+ /// + /// - `id`: The identifier of the asset to update. + /// - `name`: The user friendly name of this asset. Limited in length by `StringLimit`. + /// - `symbol`: The exchange symbol for this asset. Limited in length by `StringLimit`. + /// - `decimals`: The number of decimals this asset uses to represent one unit. + /// + /// Emits `MetadataSet`. + /// + /// Weight: `O(N + S)` where N and S are the length of the name and symbol respectively. + _i10.Assets forceSetMetadata({ + required BigInt id, + required List name, + required List symbol, + required int decimals, + required bool isFrozen, + }) { + return _i10.Assets( + _i12.ForceSetMetadata(id: id, name: name, symbol: symbol, decimals: decimals, isFrozen: isFrozen), + ); + } + + /// Clear the metadata for an asset. + /// + /// Origin must be ForceOrigin. + /// + /// Any deposit is returned. + /// + /// - `id`: The identifier of the asset to clear. + /// + /// Emits `MetadataCleared`. + /// + /// Weight: `O(1)` + _i10.Assets forceClearMetadata({required BigInt id}) { + return _i10.Assets(_i12.ForceClearMetadata(id: id)); + } + + /// Alter the attributes of a given asset. + /// + /// Origin must be `ForceOrigin`. + /// + /// - `id`: The identifier of the asset. + /// - `owner`: The new Owner of this asset. + /// - `issuer`: The new Issuer of this asset. + /// - `admin`: The new Admin of this asset. + /// - `freezer`: The new Freezer of this asset. + /// - `min_balance`: The minimum balance of this new asset that any single account must + /// have. If an account's balance is reduced below this, then it collapses to zero. + /// - `is_sufficient`: Whether a non-zero balance of this asset is deposit of sufficient + /// value to account for the state bloat associated with its balance storage. If set to + /// `true`, then non-zero balances may be stored without a `consumer` reference (and thus + /// an ED in the Balances pallet or whatever else is used to control user-account state + /// growth). 
+ /// - `is_frozen`: Whether this asset class is frozen except for permissioned/admin + /// instructions. + /// + /// Emits `AssetStatusChanged` with the identity of the asset. + /// + /// Weight: `O(1)` + _i10.Assets forceAssetStatus({ + required BigInt id, + required _i11.MultiAddress owner, + required _i11.MultiAddress issuer, + required _i11.MultiAddress admin, + required _i11.MultiAddress freezer, + required BigInt minBalance, + required bool isSufficient, + required bool isFrozen, + }) { + return _i10.Assets( + _i12.ForceAssetStatus( + id: id, + owner: owner, + issuer: issuer, + admin: admin, + freezer: freezer, + minBalance: minBalance, + isSufficient: isSufficient, + isFrozen: isFrozen, + ), + ); + } + + /// Approve an amount of asset for transfer by a delegated third-party account. + /// + /// Origin must be Signed. + /// + /// Ensures that `ApprovalDeposit` worth of `Currency` is reserved from signing account + /// for the purpose of holding the approval. If some non-zero amount of assets is already + /// approved from signing account to `delegate`, then it is topped up or unreserved to + /// meet the right value. + /// + /// NOTE: The signing account does not need to own `amount` of assets at the point of + /// making this call. + /// + /// - `id`: The identifier of the asset. + /// - `delegate`: The account to delegate permission to transfer asset. + /// - `amount`: The amount of asset that may be transferred by `delegate`. If there is + /// already an approval in place, then this acts additively. + /// + /// Emits `ApprovedTransfer` on success. + /// + /// Weight: `O(1)` + _i10.Assets approveTransfer({required BigInt id, required _i11.MultiAddress delegate, required BigInt amount}) { + return _i10.Assets(_i12.ApproveTransfer(id: id, delegate: delegate, amount: amount)); + } + + /// Cancel all of some asset approved for delegated transfer by a third-party account. 
+ /// + /// Origin must be Signed and there must be an approval in place between signer and + /// `delegate`. + /// + /// Unreserves any deposit previously reserved by `approve_transfer` for the approval. + /// + /// - `id`: The identifier of the asset. + /// - `delegate`: The account delegated permission to transfer asset. + /// + /// Emits `ApprovalCancelled` on success. + /// + /// Weight: `O(1)` + _i10.Assets cancelApproval({required BigInt id, required _i11.MultiAddress delegate}) { + return _i10.Assets(_i12.CancelApproval(id: id, delegate: delegate)); + } + + /// Cancel all of some asset approved for delegated transfer by a third-party account. + /// + /// Origin must be either ForceOrigin or Signed origin with the signer being the Admin + /// account of the asset `id`. + /// + /// Unreserves any deposit previously reserved by `approve_transfer` for the approval. + /// + /// - `id`: The identifier of the asset. + /// - `delegate`: The account delegated permission to transfer asset. + /// + /// Emits `ApprovalCancelled` on success. + /// + /// Weight: `O(1)` + _i10.Assets forceCancelApproval({ + required BigInt id, + required _i11.MultiAddress owner, + required _i11.MultiAddress delegate, + }) { + return _i10.Assets(_i12.ForceCancelApproval(id: id, owner: owner, delegate: delegate)); + } + + /// Transfer some asset balance from a previously delegated account to some third-party + /// account. + /// + /// Origin must be Signed and there must be an approval in place by the `owner` to the + /// signer. + /// + /// If the entire amount approved for transfer is transferred, then any deposit previously + /// reserved by `approve_transfer` is unreserved. + /// + /// - `id`: The identifier of the asset. + /// - `owner`: The account which previously approved for a transfer of at least `amount` and + /// from which the asset balance will be withdrawn. + /// - `destination`: The account to which the asset balance of `amount` will be transferred. 
+ /// - `amount`: The amount of assets to transfer. + /// + /// Emits `TransferredApproved` on success. + /// + /// Weight: `O(1)` + _i10.Assets transferApproved({ + required BigInt id, + required _i11.MultiAddress owner, + required _i11.MultiAddress destination, + required BigInt amount, + }) { + return _i10.Assets(_i12.TransferApproved(id: id, owner: owner, destination: destination, amount: amount)); + } + + /// Create an asset account for non-provider assets. + /// + /// A deposit will be taken from the signer account. + /// + /// - `origin`: Must be Signed; the signer account must have sufficient funds for a deposit + /// to be taken. + /// - `id`: The identifier of the asset for the account to be created. + /// + /// Emits `Touched` event when successful. + _i10.Assets touch({required BigInt id}) { + return _i10.Assets(_i12.Touch(id: id)); + } + + /// Return the deposit (if any) of an asset account or a consumer reference (if any) of an + /// account. + /// + /// The origin must be Signed. + /// + /// - `id`: The identifier of the asset for which the caller would like the deposit + /// refunded. + /// - `allow_burn`: If `true` then assets may be destroyed in order to complete the refund. + /// + /// It will fail with either [`Error::ContainsHolds`] or [`Error::ContainsFreezes`] if + /// the asset account contains holds or freezes in place. + /// + /// Emits `Refunded` event when successful. + _i10.Assets refund({required BigInt id, required bool allowBurn}) { + return _i10.Assets(_i12.Refund(id: id, allowBurn: allowBurn)); + } + + /// Sets the minimum balance of an asset. + /// + /// Only works if there aren't any accounts that are holding the asset or if + /// the new value of `min_balance` is less than the old one. + /// + /// Origin must be Signed and the sender has to be the Owner of the + /// asset `id`. + /// + /// - `id`: The identifier of the asset. + /// - `min_balance`: The new value of `min_balance`. 
+ /// + /// Emits `AssetMinBalanceChanged` event when successful. + _i10.Assets setMinBalance({required BigInt id, required BigInt minBalance}) { + return _i10.Assets(_i12.SetMinBalance(id: id, minBalance: minBalance)); + } + + /// Create an asset account for `who`. + /// + /// A deposit will be taken from the signer account. + /// + /// - `origin`: Must be Signed by `Freezer` or `Admin` of the asset `id`; the signer account + /// must have sufficient funds for a deposit to be taken. + /// - `id`: The identifier of the asset for the account to be created. + /// - `who`: The account to be created. + /// + /// Emits `Touched` event when successful. + _i10.Assets touchOther({required BigInt id, required _i11.MultiAddress who}) { + return _i10.Assets(_i12.TouchOther(id: id, who: who)); + } + + /// Return the deposit (if any) of a target asset account. Useful if you are the depositor. + /// + /// The origin must be Signed and either the account owner, depositor, or asset `Admin`. In + /// order to burn a non-zero balance of the asset, the caller must be the account and should + /// use `refund`. + /// + /// - `id`: The identifier of the asset for the account holding a deposit. + /// - `who`: The account to refund. + /// + /// It will fail with either [`Error::ContainsHolds`] or [`Error::ContainsFreezes`] if + /// the asset account contains holds or freezes in place. + /// + /// Emits `Refunded` event when successful. + _i10.Assets refundOther({required BigInt id, required _i11.MultiAddress who}) { + return _i10.Assets(_i12.RefundOther(id: id, who: who)); + } + + /// Disallow further unprivileged transfers of an asset `id` to and from an account `who`. + /// + /// Origin must be Signed and the sender should be the Freezer of the asset `id`. + /// + /// - `id`: The identifier of the account's asset. + /// - `who`: The account to be unblocked. + /// + /// Emits `Blocked`. 
+ /// + /// Weight: `O(1)` + _i10.Assets block({required BigInt id, required _i11.MultiAddress who}) { + return _i10.Assets(_i12.Block(id: id, who: who)); + } + + /// Transfer the entire transferable balance from the caller asset account. + /// + /// NOTE: This function only attempts to transfer _transferable_ balances. This means that + /// any held, frozen, or minimum balance (when `keep_alive` is `true`), will not be + /// transferred by this function. To ensure that this function results in a killed account, + /// you might need to prepare the account by removing any reference counters, storage + /// deposits, etc... + /// + /// The dispatch origin of this call must be Signed. + /// + /// - `id`: The identifier of the asset for the account holding a deposit. + /// - `dest`: The recipient of the transfer. + /// - `keep_alive`: A boolean to determine if the `transfer_all` operation should send all + /// of the funds the asset account has, causing the sender asset account to be killed + /// (false), or transfer everything except at least the minimum balance, which will + /// guarantee to keep the sender asset account alive (true). + _i10.Assets transferAll({required BigInt id, required _i11.MultiAddress dest, required bool keepAlive}) { + return _i10.Assets(_i12.TransferAll(id: id, dest: dest, keepAlive: keepAlive)); + } +} + +class Constants { + Constants(); + + /// Max number of items to destroy per `destroy_accounts` and `destroy_approvals` call. + /// + /// Must be configured to result in a weight that makes each call fit in a block. + final int removeItemsLimit = 1000; + + /// The basic amount of funds that must be reserved for an asset. + final BigInt assetDeposit = BigInt.from(1000000000); + + /// The amount of funds that must be reserved for a non-provider asset account to be + /// maintained. + final BigInt assetAccountDeposit = BigInt.from(1000000000); + + /// The basic amount of funds that must be reserved when adding metadata to your asset. 
+ final BigInt metadataDepositBase = BigInt.from(1000000000); + + /// The additional funds that must be reserved for the number of bytes you store in your + /// metadata. + final BigInt metadataDepositPerByte = BigInt.from(1000000000); + + /// The amount of funds that must be reserved when creating a new approval. + final BigInt approvalDeposit = BigInt.from(1000000000); + + /// The maximum length of a name or symbol stored on-chain. + final int stringLimit = 50; +} diff --git a/quantus_sdk/lib/generated/planck/pallets/assets_holder.dart b/quantus_sdk/lib/generated/planck/pallets/assets_holder.dart new file mode 100644 index 00000000..608ecb2d --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/assets_holder.dart @@ -0,0 +1,77 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i5; +import 'dart:typed_data' as _i6; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i4; + +import '../types/frame_support/traits/tokens/misc/id_amount_1.dart' as _i3; +import '../types/sp_core/crypto/account_id32.dart' as _i2; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageDoubleMap> _holds = + const _i1.StorageDoubleMap>( + prefix: 'AssetsHolder', + storage: 'Holds', + valueCodec: _i4.SequenceCodec<_i3.IdAmount>(_i3.IdAmount.codec), + hasher1: _i1.StorageHasher.blake2b128Concat(_i4.U32Codec.codec), + hasher2: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + ); + + final _i1.StorageDoubleMap _balancesOnHold = + const _i1.StorageDoubleMap( + prefix: 'AssetsHolder', + storage: 'BalancesOnHold', + valueCodec: _i4.U128Codec.codec, + hasher1: _i1.StorageHasher.blake2b128Concat(_i4.U32Codec.codec), + hasher2: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + ); + + /// A map that stores holds applied on an account for a given AssetId. + _i5.Future> holds(int key1, _i2.AccountId32 key2, {_i1.BlockHash? 
at}) async { + final hashedKey = _holds.hashedKeyFor(key1, key2); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _holds.decodeValue(bytes); + } + return []; /* Default */ + } + + /// A map that stores the current total balance on hold for every account on a given AssetId. + _i5.Future balancesOnHold(int key1, _i2.AccountId32 key2, {_i1.BlockHash? at}) async { + final hashedKey = _balancesOnHold.hashedKeyFor(key1, key2); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _balancesOnHold.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Returns the storage key for `holds`. + _i6.Uint8List holdsKey(int key1, _i2.AccountId32 key2) { + final hashedKey = _holds.hashedKeyFor(key1, key2); + return hashedKey; + } + + /// Returns the storage key for `balancesOnHold`. + _i6.Uint8List balancesOnHoldKey(int key1, _i2.AccountId32 key2) { + final hashedKey = _balancesOnHold.hashedKeyFor(key1, key2); + return hashedKey; + } + + /// Returns the storage map key prefix for `holds`. + _i6.Uint8List holdsMapPrefix(int key1) { + final hashedKey = _holds.mapPrefix(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `balancesOnHold`. 
+ _i6.Uint8List balancesOnHoldMapPrefix(int key1) { + final hashedKey = _balancesOnHold.mapPrefix(key1); + return hashedKey; + } +} diff --git a/quantus_sdk/lib/generated/planck/pallets/balances.dart b/quantus_sdk/lib/generated/planck/pallets/balances.dart new file mode 100644 index 00000000..be3312a9 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/balances.dart @@ -0,0 +1,466 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i9; +import 'dart:typed_data' as _i10; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i2; + +import '../types/frame_support/traits/tokens/misc/id_amount_1.dart' as _i7; +import '../types/frame_support/traits/tokens/misc/id_amount_2.dart' as _i8; +import '../types/pallet_balances/pallet/call.dart' as _i13; +import '../types/pallet_balances/types/account_data.dart' as _i4; +import '../types/pallet_balances/types/adjustment_direction.dart' as _i14; +import '../types/pallet_balances/types/balance_lock.dart' as _i5; +import '../types/pallet_balances/types/reserve_data.dart' as _i6; +import '../types/quantus_runtime/runtime_call.dart' as _i11; +import '../types/sp_core/crypto/account_id32.dart' as _i3; +import '../types/sp_runtime/multiaddress/multi_address.dart' as _i12; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageValue _totalIssuance = const _i1.StorageValue( + prefix: 'Balances', + storage: 'TotalIssuance', + valueCodec: _i2.U128Codec.codec, + ); + + final _i1.StorageValue _inactiveIssuance = const _i1.StorageValue( + prefix: 'Balances', + storage: 'InactiveIssuance', + valueCodec: _i2.U128Codec.codec, + ); + + final _i1.StorageMap<_i3.AccountId32, _i4.AccountData> _account = + const _i1.StorageMap<_i3.AccountId32, _i4.AccountData>( + prefix: 'Balances', + storage: 'Account', + valueCodec: _i4.AccountData.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i3.AccountId32Codec()), + 
); + + final _i1.StorageMap<_i3.AccountId32, List<_i5.BalanceLock>> _locks = + const _i1.StorageMap<_i3.AccountId32, List<_i5.BalanceLock>>( + prefix: 'Balances', + storage: 'Locks', + valueCodec: _i2.SequenceCodec<_i5.BalanceLock>(_i5.BalanceLock.codec), + hasher: _i1.StorageHasher.blake2b128Concat(_i3.AccountId32Codec()), + ); + + final _i1.StorageMap<_i3.AccountId32, List<_i6.ReserveData>> _reserves = + const _i1.StorageMap<_i3.AccountId32, List<_i6.ReserveData>>( + prefix: 'Balances', + storage: 'Reserves', + valueCodec: _i2.SequenceCodec<_i6.ReserveData>(_i6.ReserveData.codec), + hasher: _i1.StorageHasher.blake2b128Concat(_i3.AccountId32Codec()), + ); + + final _i1.StorageMap<_i3.AccountId32, List<_i7.IdAmount>> _holds = + const _i1.StorageMap<_i3.AccountId32, List<_i7.IdAmount>>( + prefix: 'Balances', + storage: 'Holds', + valueCodec: _i2.SequenceCodec<_i7.IdAmount>(_i7.IdAmount.codec), + hasher: _i1.StorageHasher.blake2b128Concat(_i3.AccountId32Codec()), + ); + + final _i1.StorageMap<_i3.AccountId32, List<_i8.IdAmount>> _freezes = + const _i1.StorageMap<_i3.AccountId32, List<_i8.IdAmount>>( + prefix: 'Balances', + storage: 'Freezes', + valueCodec: _i2.SequenceCodec<_i8.IdAmount>(_i8.IdAmount.codec), + hasher: _i1.StorageHasher.blake2b128Concat(_i3.AccountId32Codec()), + ); + + /// The total units issued in the system. + _i9.Future totalIssuance({_i1.BlockHash? at}) async { + final hashedKey = _totalIssuance.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _totalIssuance.decodeValue(bytes); + } + return BigInt.zero; /* Default */ + } + + /// The total units of outstanding deactivated balance in the system. + _i9.Future inactiveIssuance({_i1.BlockHash? 
at}) async { + final hashedKey = _inactiveIssuance.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _inactiveIssuance.decodeValue(bytes); + } + return BigInt.zero; /* Default */ + } + + /// The Balances pallet example of storing the balance of an account. + /// + /// # Example + /// + /// ```nocompile + /// impl pallet_balances::Config for Runtime { + /// type AccountStore = StorageMapShim, frame_system::Provider, AccountId, Self::AccountData> + /// } + /// ``` + /// + /// You can also store the balance of an account in the `System` pallet. + /// + /// # Example + /// + /// ```nocompile + /// impl pallet_balances::Config for Runtime { + /// type AccountStore = System + /// } + /// ``` + /// + /// But this comes with tradeoffs, storing account balances in the system pallet stores + /// `frame_system` data alongside the account data contrary to storing account balances in the + /// `Balances` pallet, which uses a `StorageMap` to store balances data only. + /// NOTE: This is only used in the case that this pallet is used to store balances. + _i9.Future<_i4.AccountData> account(_i3.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _account.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _account.decodeValue(bytes); + } + return _i4.AccountData( + free: BigInt.zero, + reserved: BigInt.zero, + frozen: BigInt.zero, + flags: BigInt.parse('170141183460469231731687303715884105728', radix: 10), + ); /* Default */ + } + + /// Any liquidity locks on some account balances. + /// NOTE: Should only be accessed when setting, changing and freeing a lock. + /// + /// Use of locks is deprecated in favour of freezes. See `https://github.com/paritytech/substrate/pull/12951/` + _i9.Future> locks(_i3.AccountId32 key1, {_i1.BlockHash? 
at}) async { + final hashedKey = _locks.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _locks.decodeValue(bytes); + } + return []; /* Default */ + } + + /// Named reserves on some account balances. + /// + /// Use of reserves is deprecated in favour of holds. See `https://github.com/paritytech/substrate/pull/12951/` + _i9.Future> reserves(_i3.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _reserves.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _reserves.decodeValue(bytes); + } + return []; /* Default */ + } + + /// Holds on account balances. + _i9.Future> holds(_i3.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _holds.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _holds.decodeValue(bytes); + } + return []; /* Default */ + } + + /// Freeze locks on account balances. + _i9.Future> freezes(_i3.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _freezes.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _freezes.decodeValue(bytes); + } + return []; /* Default */ + } + + /// The Balances pallet example of storing the balance of an account. + /// + /// # Example + /// + /// ```nocompile + /// impl pallet_balances::Config for Runtime { + /// type AccountStore = StorageMapShim, frame_system::Provider, AccountId, Self::AccountData> + /// } + /// ``` + /// + /// You can also store the balance of an account in the `System` pallet. 
+ /// + /// # Example + /// + /// ```nocompile + /// impl pallet_balances::Config for Runtime { + /// type AccountStore = System + /// } + /// ``` + /// + /// But this comes with tradeoffs, storing account balances in the system pallet stores + /// `frame_system` data alongside the account data contrary to storing account balances in the + /// `Balances` pallet, which uses a `StorageMap` to store balances data only. + /// NOTE: This is only used in the case that this pallet is used to store balances. + _i9.Future> multiAccount(List<_i3.AccountId32> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _account.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _account.decodeValue(v.key)).toList(); + } + return (keys + .map( + (key) => _i4.AccountData( + free: BigInt.zero, + reserved: BigInt.zero, + frozen: BigInt.zero, + flags: BigInt.parse('170141183460469231731687303715884105728', radix: 10), + ), + ) + .toList() + as List<_i4.AccountData>); /* Default */ + } + + /// Any liquidity locks on some account balances. + /// NOTE: Should only be accessed when setting, changing and freeing a lock. + /// + /// Use of locks is deprecated in favour of freezes. See `https://github.com/paritytech/substrate/pull/12951/` + _i9.Future>> multiLocks(List<_i3.AccountId32> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _locks.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _locks.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>); /* Default */ + } + + /// Named reserves on some account balances. + /// + /// Use of reserves is deprecated in favour of holds. See `https://github.com/paritytech/substrate/pull/12951/` + _i9.Future>> multiReserves(List<_i3.AccountId32> keys, {_i1.BlockHash? 
at}) async { + final hashedKeys = keys.map((key) => _reserves.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _reserves.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>); /* Default */ + } + + /// Holds on account balances. + _i9.Future>> multiHolds(List<_i3.AccountId32> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _holds.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _holds.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>); /* Default */ + } + + /// Freeze locks on account balances. + _i9.Future>> multiFreezes(List<_i3.AccountId32> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _freezes.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _freezes.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>); /* Default */ + } + + /// Returns the storage key for `totalIssuance`. + _i10.Uint8List totalIssuanceKey() { + final hashedKey = _totalIssuance.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `inactiveIssuance`. + _i10.Uint8List inactiveIssuanceKey() { + final hashedKey = _inactiveIssuance.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `account`. + _i10.Uint8List accountKey(_i3.AccountId32 key1) { + final hashedKey = _account.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `locks`. + _i10.Uint8List locksKey(_i3.AccountId32 key1) { + final hashedKey = _locks.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `reserves`. 
+ _i10.Uint8List reservesKey(_i3.AccountId32 key1) { + final hashedKey = _reserves.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `holds`. + _i10.Uint8List holdsKey(_i3.AccountId32 key1) { + final hashedKey = _holds.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `freezes`. + _i10.Uint8List freezesKey(_i3.AccountId32 key1) { + final hashedKey = _freezes.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `account`. + _i10.Uint8List accountMapPrefix() { + final hashedKey = _account.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `locks`. + _i10.Uint8List locksMapPrefix() { + final hashedKey = _locks.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `reserves`. + _i10.Uint8List reservesMapPrefix() { + final hashedKey = _reserves.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `holds`. + _i10.Uint8List holdsMapPrefix() { + final hashedKey = _holds.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `freezes`. + _i10.Uint8List freezesMapPrefix() { + final hashedKey = _freezes.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Transfer some liquid free balance to another account. + /// + /// `transfer_allow_death` will set the `FreeBalance` of the sender and receiver. + /// If the sender's account is below the existential deposit as a result + /// of the transfer, the account will be reaped. + /// + /// The dispatch origin for this call must be `Signed` by the transactor. + _i11.Balances transferAllowDeath({required _i12.MultiAddress dest, required BigInt value}) { + return _i11.Balances(_i13.TransferAllowDeath(dest: dest, value: value)); + } + + /// Exactly as `transfer_allow_death`, except the origin must be root and the source account + /// may be specified. 
+ _i11.Balances forceTransfer({ + required _i12.MultiAddress source, + required _i12.MultiAddress dest, + required BigInt value, + }) { + return _i11.Balances(_i13.ForceTransfer(source: source, dest: dest, value: value)); + } + + /// Same as the [`transfer_allow_death`] call, but with a check that the transfer will not + /// kill the origin account. + /// + /// 99% of the time you want [`transfer_allow_death`] instead. + /// + /// [`transfer_allow_death`]: struct.Pallet.html#method.transfer + _i11.Balances transferKeepAlive({required _i12.MultiAddress dest, required BigInt value}) { + return _i11.Balances(_i13.TransferKeepAlive(dest: dest, value: value)); + } + + /// Transfer the entire transferable balance from the caller account. + /// + /// NOTE: This function only attempts to transfer _transferable_ balances. This means that + /// any locked, reserved, or existential deposits (when `keep_alive` is `true`), will not be + /// transferred by this function. To ensure that this function results in a killed account, + /// you might need to prepare the account by removing any reference counters, storage + /// deposits, etc... + /// + /// The dispatch origin of this call must be Signed. + /// + /// - `dest`: The recipient of the transfer. + /// - `keep_alive`: A boolean to determine if the `transfer_all` operation should send all + /// of the funds the account has, causing the sender account to be killed (false), or + /// transfer everything except at least the existential deposit, which will guarantee to + /// keep the sender account alive (true). + _i11.Balances transferAll({required _i12.MultiAddress dest, required bool keepAlive}) { + return _i11.Balances(_i13.TransferAll(dest: dest, keepAlive: keepAlive)); + } + + /// Unreserve some balance from a user by force. + /// + /// Can only be called by ROOT. 
+ _i11.Balances forceUnreserve({required _i12.MultiAddress who, required BigInt amount}) { + return _i11.Balances(_i13.ForceUnreserve(who: who, amount: amount)); + } + + /// Upgrade a specified account. + /// + /// - `origin`: Must be `Signed`. + /// - `who`: The account to be upgraded. + /// + /// This will waive the transaction fee if at least all but 10% of the accounts needed to + /// be upgraded. (We let some not have to be upgraded just in order to allow for the + /// possibility of churn). + _i11.Balances upgradeAccounts({required List<_i3.AccountId32> who}) { + return _i11.Balances(_i13.UpgradeAccounts(who: who)); + } + + /// Set the regular balance of a given account. + /// + /// The dispatch origin for this call is `root`. + _i11.Balances forceSetBalance({required _i12.MultiAddress who, required BigInt newFree}) { + return _i11.Balances(_i13.ForceSetBalance(who: who, newFree: newFree)); + } + + /// Adjust the total issuance in a saturating way. + /// + /// Can only be called by root and always needs a positive `delta`. + /// + /// # Example + _i11.Balances forceAdjustTotalIssuance({required _i14.AdjustmentDirection direction, required BigInt delta}) { + return _i11.Balances(_i13.ForceAdjustTotalIssuance(direction: direction, delta: delta)); + } + + /// Burn the specified liquid free balance from the origin account. + /// + /// If the origin's account ends up below the existential deposit as a result + /// of the burn and `keep_alive` is false, the account will be reaped. + /// + /// Unlike sending funds to a _burn_ address, which merely makes the funds inaccessible, + /// this `burn` operation will reduce total issuance by the amount _burned_. + _i11.Balances burn({required BigInt value, required bool keepAlive}) { + return _i11.Balances(_i13.Burn(value: value, keepAlive: keepAlive)); + } +} + +class Constants { + Constants(); + + /// The minimum amount required to keep an account open. MUST BE GREATER THAN ZERO! 
+ /// + /// If you *really* need it to be zero, you can enable the feature `insecure_zero_ed` for + /// this pallet. However, you do so at your own risk: this will open up a major DoS vector. + /// In case you have multiple sources of provider references, you may also get unexpected + /// behaviour if you set this to zero. + /// + /// Bottom line: Do yourself a favour and make it at least one! + final BigInt existentialDeposit = BigInt.from(1000000000); + + /// The maximum number of locks that should exist on an account. + /// Not strictly enforced, but used for weight estimation. + /// + /// Use of locks is deprecated in favour of freezes. See `https://github.com/paritytech/substrate/pull/12951/` + final int maxLocks = 50; + + /// The maximum number of named reserves that can exist on an account. + /// + /// Use of reserves is deprecated in favour of holds. See `https://github.com/paritytech/substrate/pull/12951/` + final int maxReserves = 0; + + /// The maximum number of individual freeze locks that can exist on an account at any time. 
+ final int maxFreezes = 0; +} diff --git a/quantus_sdk/lib/generated/planck/pallets/conviction_voting.dart b/quantus_sdk/lib/generated/planck/pallets/conviction_voting.dart new file mode 100644 index 00000000..5b01a8c0 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/conviction_voting.dart @@ -0,0 +1,260 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i6; +import 'dart:typed_data' as _i10; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i4; + +import '../types/pallet_conviction_voting/conviction/conviction.dart' as _i15; +import '../types/pallet_conviction_voting/pallet/call.dart' as _i13; +import '../types/pallet_conviction_voting/types/delegations.dart' as _i8; +import '../types/pallet_conviction_voting/vote/account_vote.dart' as _i12; +import '../types/pallet_conviction_voting/vote/casting.dart' as _i7; +import '../types/pallet_conviction_voting/vote/prior_lock.dart' as _i9; +import '../types/pallet_conviction_voting/vote/voting.dart' as _i3; +import '../types/quantus_runtime/runtime_call.dart' as _i11; +import '../types/sp_core/crypto/account_id32.dart' as _i2; +import '../types/sp_runtime/multiaddress/multi_address.dart' as _i14; +import '../types/tuples.dart' as _i5; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageDoubleMap<_i2.AccountId32, int, _i3.Voting> _votingFor = + const _i1.StorageDoubleMap<_i2.AccountId32, int, _i3.Voting>( + prefix: 'ConvictionVoting', + storage: 'VotingFor', + valueCodec: _i3.Voting.codec, + hasher1: _i1.StorageHasher.twoxx64Concat(_i2.AccountId32Codec()), + hasher2: _i1.StorageHasher.twoxx64Concat(_i4.U16Codec.codec), + ); + + final _i1.StorageMap<_i2.AccountId32, List<_i5.Tuple2>> _classLocksFor = + const _i1.StorageMap<_i2.AccountId32, List<_i5.Tuple2>>( + prefix: 'ConvictionVoting', + storage: 'ClassLocksFor', + valueCodec: _i4.SequenceCodec<_i5.Tuple2>( + 
_i5.Tuple2Codec(_i4.U16Codec.codec, _i4.U128Codec.codec), + ), + hasher: _i1.StorageHasher.twoxx64Concat(_i2.AccountId32Codec()), + ); + + /// All voting for a particular voter in a particular voting class. We store the balance for the + /// number of votes that we have recorded. + _i6.Future<_i3.Voting> votingFor(_i2.AccountId32 key1, int key2, {_i1.BlockHash? at}) async { + final hashedKey = _votingFor.hashedKeyFor(key1, key2); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _votingFor.decodeValue(bytes); + } + return _i3.Casting( + _i7.Casting( + votes: [], + delegations: _i8.Delegations(votes: BigInt.zero, capital: BigInt.zero), + prior: _i9.PriorLock(0, BigInt.zero), + ), + ); /* Default */ + } + + /// The voting classes which have a non-zero lock requirement and the lock amounts which they + /// require. The actual amount locked on behalf of this pallet should always be the maximum of + /// this list. + _i6.Future>> classLocksFor(_i2.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _classLocksFor.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _classLocksFor.decodeValue(bytes); + } + return []; /* Default */ + } + + /// The voting classes which have a non-zero lock requirement and the lock amounts which they + /// require. The actual amount locked on behalf of this pallet should always be the maximum of + /// this list. + _i6.Future>>> multiClassLocksFor( + List<_i2.AccountId32> keys, { + _i1.BlockHash? at, + }) async { + final hashedKeys = keys.map((key) => _classLocksFor.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _classLocksFor.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>>); /* Default */ + } + + /// Returns the storage key for `votingFor`. 
+ _i10.Uint8List votingForKey(_i2.AccountId32 key1, int key2) { + final hashedKey = _votingFor.hashedKeyFor(key1, key2); + return hashedKey; + } + + /// Returns the storage key for `classLocksFor`. + _i10.Uint8List classLocksForKey(_i2.AccountId32 key1) { + final hashedKey = _classLocksFor.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `votingFor`. + _i10.Uint8List votingForMapPrefix(_i2.AccountId32 key1) { + final hashedKey = _votingFor.mapPrefix(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `classLocksFor`. + _i10.Uint8List classLocksForMapPrefix() { + final hashedKey = _classLocksFor.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Vote in a poll. If `vote.is_aye()`, the vote is to enact the proposal; + /// otherwise it is a vote to keep the status quo. + /// + /// The dispatch origin of this call must be _Signed_. + /// + /// - `poll_index`: The index of the poll to vote for. + /// - `vote`: The vote configuration. + /// + /// Weight: `O(R)` where R is the number of polls the voter has voted on. + _i11.ConvictionVoting vote({required BigInt pollIndex, required _i12.AccountVote vote}) { + return _i11.ConvictionVoting(_i13.Vote(pollIndex: pollIndex, vote: vote)); + } + + /// Delegate the voting power (with some given conviction) of the sending account for a + /// particular class of polls. + /// + /// The balance delegated is locked for as long as it's delegated, and thereafter for the + /// time appropriate for the conviction's lock period. + /// + /// The dispatch origin of this call must be _Signed_, and the signing account must either: + /// - be delegating already; or + /// - have no voting activity (if there is, then it will need to be removed through + /// `remove_vote`). + /// + /// - `to`: The account whose voting the `target` account's voting power will follow. + /// - `class`: The class of polls to delegate. 
To delegate multiple classes, multiple calls + /// to this function are required. + /// - `conviction`: The conviction that will be attached to the delegated votes. When the + /// account is undelegated, the funds will be locked for the corresponding period. + /// - `balance`: The amount of the account's balance to be used in delegating. This must not + /// be more than the account's current balance. + /// + /// Emits `Delegated`. + /// + /// Weight: `O(R)` where R is the number of polls the voter delegating to has + /// voted on. Weight is initially charged as if maximum votes, but is refunded later. + _i11.ConvictionVoting delegate({ + required int class_, + required _i14.MultiAddress to, + required _i15.Conviction conviction, + required BigInt balance, + }) { + return _i11.ConvictionVoting(_i13.Delegate(class_: class_, to: to, conviction: conviction, balance: balance)); + } + + /// Undelegate the voting power of the sending account for a particular class of polls. + /// + /// Tokens may be unlocked following once an amount of time consistent with the lock period + /// of the conviction with which the delegation was issued has passed. + /// + /// The dispatch origin of this call must be _Signed_ and the signing account must be + /// currently delegating. + /// + /// - `class`: The class of polls to remove the delegation from. + /// + /// Emits `Undelegated`. + /// + /// Weight: `O(R)` where R is the number of polls the voter delegating to has + /// voted on. Weight is initially charged as if maximum votes, but is refunded later. + _i11.ConvictionVoting undelegate({required int class_}) { + return _i11.ConvictionVoting(_i13.Undelegate(class_: class_)); + } + + /// Remove the lock caused by prior voting/delegating which has expired within a particular + /// class. + /// + /// The dispatch origin of this call must be _Signed_. + /// + /// - `class`: The class of polls to unlock. + /// - `target`: The account to remove the lock on. 
+ /// + /// Weight: `O(R)` with R number of vote of target. + _i11.ConvictionVoting unlock({required int class_, required _i14.MultiAddress target}) { + return _i11.ConvictionVoting(_i13.Unlock(class_: class_, target: target)); + } + + /// Remove a vote for a poll. + /// + /// If: + /// - the poll was cancelled, or + /// - the poll is ongoing, or + /// - the poll has ended such that + /// - the vote of the account was in opposition to the result; or + /// - there was no conviction to the account's vote; or + /// - the account made a split vote + /// ...then the vote is removed cleanly and a following call to `unlock` may result in more + /// funds being available. + /// + /// If, however, the poll has ended and: + /// - it finished corresponding to the vote of the account, and + /// - the account made a standard vote with conviction, and + /// - the lock period of the conviction is not over + /// ...then the lock will be aggregated into the overall account's lock, which may involve + /// *overlocking* (where the two locks are combined into a single lock that is the maximum + /// of both the amount locked and the time is it locked for). + /// + /// The dispatch origin of this call must be _Signed_, and the signer must have a vote + /// registered for poll `index`. + /// + /// - `index`: The index of poll of the vote to be removed. + /// - `class`: Optional parameter, if given it indicates the class of the poll. For polls + /// which have finished or are cancelled, this must be `Some`. + /// + /// Weight: `O(R + log R)` where R is the number of polls that `target` has voted on. + /// Weight is calculated for the maximum number of vote. + _i11.ConvictionVoting removeVote({int? class_, required int index}) { + return _i11.ConvictionVoting(_i13.RemoveVote(class_: class_, index: index)); + } + + /// Remove a vote for a poll. + /// + /// If the `target` is equal to the signer, then this function is exactly equivalent to + /// `remove_vote`. 
If not equal to the signer, then the vote must have expired, + /// either because the poll was cancelled, because the voter lost the poll or + /// because the conviction period is over. + /// + /// The dispatch origin of this call must be _Signed_. + /// + /// - `target`: The account of the vote to be removed; this account must have voted for poll + /// `index`. + /// - `index`: The index of poll of the vote to be removed. + /// - `class`: The class of the poll. + /// + /// Weight: `O(R + log R)` where R is the number of polls that `target` has voted on. + /// Weight is calculated for the maximum number of vote. + _i11.ConvictionVoting removeOtherVote({required _i14.MultiAddress target, required int class_, required int index}) { + return _i11.ConvictionVoting(_i13.RemoveOtherVote(target: target, class_: class_, index: index)); + } +} + +class Constants { + Constants(); + + /// The maximum number of concurrent votes an account may have. + /// + /// Also used to compute weight, an overly large value can lead to extrinsics with large + /// weight estimation: see `delegate` for instance. + final int maxVotes = 4096; + + /// The minimum period of vote locking. + /// + /// It should be no shorter than enactment period to ensure that in the case of an approval, + /// those successful voters are locked into the consequences that their votes entail. 
+ final int voteLockingPeriod = 50400; +} diff --git a/quantus_sdk/lib/generated/planck/pallets/mining_rewards.dart b/quantus_sdk/lib/generated/planck/pallets/mining_rewards.dart new file mode 100644 index 00000000..ce7825c3 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/mining_rewards.dart @@ -0,0 +1,84 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i3; +import 'dart:typed_data' as _i4; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i2; + +import '../types/sp_core/crypto/account_id32.dart' as _i5; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageValue _collectedFees = const _i1.StorageValue( + prefix: 'MiningRewards', + storage: 'CollectedFees', + valueCodec: _i2.U128Codec.codec, + ); + + _i3.Future collectedFees({_i1.BlockHash? at}) async { + final hashedKey = _collectedFees.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _collectedFees.decodeValue(bytes); + } + return BigInt.zero; /* Default */ + } + + /// Returns the storage key for `collectedFees`. 
+ _i4.Uint8List collectedFeesKey() { + final hashedKey = _collectedFees.hashedKey(); + return hashedKey; + } +} + +class Constants { + Constants(); + + /// The maximum total supply of tokens + final BigInt maxSupply = BigInt.parse('21000000000000000000', radix: 10); + + /// The divisor used to calculate block rewards from remaining supply + final BigInt emissionDivisor = BigInt.from(26280000); + + /// The base unit for token amounts (e.g., 1e12 for 12 decimals) + final BigInt unit = BigInt.from(1000000000000); + + /// Account ID used as the "from" account when creating transfer proofs for minted tokens + final _i5.AccountId32 mintingAccount = const [ + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + ]; +} diff --git a/quantus_sdk/lib/generated/planck/pallets/multisig.dart b/quantus_sdk/lib/generated/planck/pallets/multisig.dart new file mode 100644 index 00000000..d6de5996 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/multisig.dart @@ -0,0 +1,308 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i6; +import 'dart:typed_data' as _i7; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i5; + +import '../types/frame_support/pallet_id.dart' as _i11; +import '../types/pallet_multisig/multisig_data.dart' as _i3; +import '../types/pallet_multisig/pallet/call.dart' as _i9; +import '../types/pallet_multisig/proposal_data.dart' as _i4; +import '../types/quantus_runtime/runtime_call.dart' as _i8; +import '../types/sp_arithmetic/per_things/permill.dart' as _i10; +import '../types/sp_core/crypto/account_id32.dart' as _i2; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageMap<_i2.AccountId32, _i3.MultisigData> _multisigs = + const _i1.StorageMap<_i2.AccountId32, _i3.MultisigData>( + prefix: 'Multisig', + storage: 
'Multisigs', + valueCodec: _i3.MultisigData.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + ); + + final _i1.StorageDoubleMap<_i2.AccountId32, int, _i4.ProposalData> _proposals = + const _i1.StorageDoubleMap<_i2.AccountId32, int, _i4.ProposalData>( + prefix: 'Multisig', + storage: 'Proposals', + valueCodec: _i4.ProposalData.codec, + hasher1: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + hasher2: _i1.StorageHasher.twoxx64Concat(_i5.U32Codec.codec), + ); + + final _i1.StorageMap<_i2.AccountId32, List<_i2.AccountId32>> _dissolveApprovals = + const _i1.StorageMap<_i2.AccountId32, List<_i2.AccountId32>>( + prefix: 'Multisig', + storage: 'DissolveApprovals', + valueCodec: _i5.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()), + hasher: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + ); + + /// Multisigs stored by their deterministic address + _i6.Future<_i3.MultisigData?> multisigs(_i2.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _multisigs.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _multisigs.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Proposals indexed by (multisig_address, proposal_nonce) + _i6.Future<_i4.ProposalData?> proposals(_i2.AccountId32 key1, int key2, {_i1.BlockHash? at}) async { + final hashedKey = _proposals.hashedKeyFor(key1, key2); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _proposals.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Dissolve approvals: tracks which signers approved dissolving the multisig + /// Maps multisig_address -> Vec + _i6.Future?> dissolveApprovals(_i2.AccountId32 key1, {_i1.BlockHash? 
at}) async { + final hashedKey = _dissolveApprovals.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _dissolveApprovals.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Multisigs stored by their deterministic address + _i6.Future> multiMultisigs(List<_i2.AccountId32> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _multisigs.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _multisigs.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Dissolve approvals: tracks which signers approved dissolving the multisig + /// Maps multisig_address -> Vec + _i6.Future?>> multiDissolveApprovals( + List<_i2.AccountId32> keys, { + _i1.BlockHash? at, + }) async { + final hashedKeys = keys.map((key) => _dissolveApprovals.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _dissolveApprovals.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Returns the storage key for `multisigs`. + _i7.Uint8List multisigsKey(_i2.AccountId32 key1) { + final hashedKey = _multisigs.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `proposals`. + _i7.Uint8List proposalsKey(_i2.AccountId32 key1, int key2) { + final hashedKey = _proposals.hashedKeyFor(key1, key2); + return hashedKey; + } + + /// Returns the storage key for `dissolveApprovals`. + _i7.Uint8List dissolveApprovalsKey(_i2.AccountId32 key1) { + final hashedKey = _dissolveApprovals.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `multisigs`. + _i7.Uint8List multisigsMapPrefix() { + final hashedKey = _multisigs.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `proposals`. 
+ _i7.Uint8List proposalsMapPrefix(_i2.AccountId32 key1) { + final hashedKey = _proposals.mapPrefix(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `dissolveApprovals`. + _i7.Uint8List dissolveApprovalsMapPrefix() { + final hashedKey = _dissolveApprovals.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Create a new multisig account with deterministic address + /// + /// Parameters: + /// - `signers`: List of accounts that can sign for this multisig + /// - `threshold`: Number of approvals required to execute transactions + /// - `nonce`: User-provided nonce for address uniqueness + /// + /// The multisig address is deterministically derived from: + /// hash(pallet_id || sorted_signers || threshold || nonce) + /// + /// Signers are automatically sorted before hashing, so order doesn't matter. + /// + /// Economic costs: + /// - MultisigFee: burned immediately (spam prevention) + /// - MultisigDeposit: reserved until dissolution, then returned to creator (storage bond) + _i8.Multisig createMultisig({required List<_i2.AccountId32> signers, required int threshold, required BigInt nonce}) { + return _i8.Multisig(_i9.CreateMultisig(signers: signers, threshold: threshold, nonce: nonce)); + } + + /// Propose a transaction to be executed by the multisig + /// + /// Parameters: + /// - `multisig_address`: The multisig account that will execute the call + /// - `call`: The encoded call to execute + /// - `expiry`: Block number when this proposal expires + /// + /// The proposer must be a signer and must pay: + /// - A deposit (refundable - returned immediately on execution/cancellation) + /// - A fee (non-refundable, burned immediately) + /// + /// **Auto-cleanup:** Before creating a new proposal, ALL proposer's expired + /// proposals are automatically removed. This is the primary cleanup mechanism. + /// + /// **For threshold=1:** If the multisig threshold is 1, the proposal executes immediately. 
+ /// + /// **Weight:** Charged upfront for worst-case (high-security path with decode). + /// Refunded to actual cost on success based on whether HS path was taken. + _i8.Multisig propose({required _i2.AccountId32 multisigAddress, required List call, required int expiry}) { + return _i8.Multisig(_i9.Propose(multisigAddress: multisigAddress, call: call, expiry: expiry)); + } + + /// Approve a proposed transaction + /// + /// If this approval brings the total approvals to or above the threshold, + /// the proposal status changes to `Approved` and can be executed via `execute()`. + /// + /// Parameters: + /// - `multisig_address`: The multisig account + /// - `proposal_id`: ID (nonce) of the proposal to approve + /// + /// Weight: Charges for MAX call size, refunds based on actual + _i8.Multisig approve({required _i2.AccountId32 multisigAddress, required int proposalId}) { + return _i8.Multisig(_i9.Approve(multisigAddress: multisigAddress, proposalId: proposalId)); + } + + /// Cancel a proposed transaction (only by proposer) + /// + /// Parameters: + /// - `multisig_address`: The multisig account + /// - `proposal_id`: ID (nonce) of the proposal to cancel + _i8.Multisig cancel({required _i2.AccountId32 multisigAddress, required int proposalId}) { + return _i8.Multisig(_i9.Cancel(multisigAddress: multisigAddress, proposalId: proposalId)); + } + + /// Remove expired proposals and return deposits to proposers + /// + /// Can only be called by signers of the multisig. + /// Only removes Active proposals that have expired (past expiry block). + /// Executed and Cancelled proposals are automatically cleaned up immediately. + /// + /// The deposit is always returned to the original proposer, not the caller. + /// This allows any signer to help clean up storage even if proposer is inactive. 
+ _i8.Multisig removeExpired({required _i2.AccountId32 multisigAddress, required int proposalId}) { + return _i8.Multisig(_i9.RemoveExpired(multisigAddress: multisigAddress, proposalId: proposalId)); + } + + /// Claim all deposits from expired proposals + /// + /// This is a batch operation that removes all expired proposals where: + /// - Caller is the proposer + /// - Proposal is Active and past expiry block + /// + /// Note: Executed and Cancelled proposals are automatically cleaned up immediately, + /// so only Active+Expired proposals need manual cleanup. + /// + /// Returns all proposal deposits to the proposer in a single transaction. + _i8.Multisig claimDeposits({required _i2.AccountId32 multisigAddress}) { + return _i8.Multisig(_i9.ClaimDeposits(multisigAddress: multisigAddress)); + } + + /// Execute an approved proposal + /// + /// Can be called by any signer of the multisig once the proposal has reached + /// the approval threshold (status = Approved). The proposal must not be expired. + /// + /// On execution: + /// - The call is decoded and dispatched as the multisig account + /// - Proposal is removed from storage + /// - Deposit is returned to the proposer + /// + /// Parameters: + /// - `multisig_address`: The multisig account + /// - `proposal_id`: ID (nonce) of the proposal to execute + _i8.Multisig execute({required _i2.AccountId32 multisigAddress, required int proposalId}) { + return _i8.Multisig(_i9.Execute(multisigAddress: multisigAddress, proposalId: proposalId)); + } + + /// Approve dissolving a multisig account + /// + /// Signers call this to approve dissolving the multisig. + /// When threshold is reached, the multisig is automatically dissolved. 
+ /// + /// Requirements: + /// - Caller must be a signer + /// - No proposals exist (active, executed, or cancelled) - must be fully cleaned up + /// - Multisig account balance must be zero + /// + /// When threshold is reached: + /// - Deposit is returned to creator + /// - Multisig storage is removed + _i8.Multisig approveDissolve({required _i2.AccountId32 multisigAddress}) { + return _i8.Multisig(_i9.ApproveDissolve(multisigAddress: multisigAddress)); + } +} + +class Constants { + Constants(); + + /// Maximum number of signers allowed in a multisig + final int maxSigners = 100; + + /// Maximum total number of proposals in storage per multisig (Active + Executed + + /// Cancelled) This prevents unbounded storage growth and incentivizes cleanup + final int maxTotalProposalsInStorage = 200; + + /// Maximum size of an encoded call + final int maxCallSize = 10240; + + /// Fee charged for creating a multisig (non-refundable, burned) + final BigInt multisigFee = BigInt.from(100000000000); + + /// Deposit reserved for creating a multisig (returned when dissolved). + /// Keeps the state clean by incentivizing removal of unused multisigs. + final BigInt multisigDeposit = BigInt.from(500000000000); + + /// Deposit required per proposal (returned on execute or cancel) + final BigInt proposalDeposit = BigInt.from(1000000000000); + + /// Fee charged for creating a proposal (non-refundable, paid always) + final BigInt proposalFee = BigInt.from(1000000000000); + + /// Percentage increase in ProposalFee for each signer in the multisig. + /// + /// Formula: `FinalFee = ProposalFee + (ProposalFee * SignerCount * SignerStepFactor)` + /// Example: If Fee=100, Signers=5, Factor=1%, then Extra = 100 * 5 * 0.01 = 5. Total = 105. 
+ final _i10.Permill signerStepFactor = 10000; + + /// Pallet ID for generating multisig addresses + final _i11.PalletId palletId = const [112, 121, 47, 109, 108, 116, 115, 103]; + + /// Maximum duration (in blocks) that a proposal can be set to expire in the future. + /// This prevents proposals from being created with extremely far expiry dates + /// that would lock deposits and bloat storage for extended periods. + /// + /// Example: If set to 100_000 blocks (~2 weeks at 12s blocks), + /// a proposal created at block 1000 cannot have expiry > 101_000. + final int maxExpiryDuration = 100800; +} diff --git a/quantus_sdk/lib/generated/planck/pallets/preimage.dart b/quantus_sdk/lib/generated/planck/pallets/preimage.dart new file mode 100644 index 00000000..d0a8f646 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/preimage.dart @@ -0,0 +1,181 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i7; +import 'dart:typed_data' as _i8; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i6; + +import '../types/pallet_preimage/old_request_status.dart' as _i3; +import '../types/pallet_preimage/pallet/call.dart' as _i10; +import '../types/pallet_preimage/request_status.dart' as _i4; +import '../types/primitive_types/h256.dart' as _i2; +import '../types/quantus_runtime/runtime_call.dart' as _i9; +import '../types/tuples.dart' as _i5; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageMap<_i2.H256, _i3.OldRequestStatus> _statusFor = + const _i1.StorageMap<_i2.H256, _i3.OldRequestStatus>( + prefix: 'Preimage', + storage: 'StatusFor', + valueCodec: _i3.OldRequestStatus.codec, + hasher: _i1.StorageHasher.identity(_i2.H256Codec()), + ); + + final _i1.StorageMap<_i2.H256, _i4.RequestStatus> _requestStatusFor = + const _i1.StorageMap<_i2.H256, _i4.RequestStatus>( + prefix: 'Preimage', + storage: 'RequestStatusFor', + valueCodec: 
_i4.RequestStatus.codec, + hasher: _i1.StorageHasher.identity(_i2.H256Codec()), + ); + + final _i1.StorageMap<_i5.Tuple2<_i2.H256, int>, List> _preimageFor = + const _i1.StorageMap<_i5.Tuple2<_i2.H256, int>, List>( + prefix: 'Preimage', + storage: 'PreimageFor', + valueCodec: _i6.U8SequenceCodec.codec, + hasher: _i1.StorageHasher.identity(_i5.Tuple2Codec<_i2.H256, int>(_i2.H256Codec(), _i6.U32Codec.codec)), + ); + + /// The request status of a given hash. + _i7.Future<_i3.OldRequestStatus?> statusFor(_i2.H256 key1, {_i1.BlockHash? at}) async { + final hashedKey = _statusFor.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _statusFor.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The request status of a given hash. + _i7.Future<_i4.RequestStatus?> requestStatusFor(_i2.H256 key1, {_i1.BlockHash? at}) async { + final hashedKey = _requestStatusFor.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _requestStatusFor.decodeValue(bytes); + } + return null; /* Nullable */ + } + + _i7.Future?> preimageFor(_i5.Tuple2<_i2.H256, int> key1, {_i1.BlockHash? at}) async { + final hashedKey = _preimageFor.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _preimageFor.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The request status of a given hash. + _i7.Future> multiStatusFor(List<_i2.H256> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _statusFor.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _statusFor.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// The request status of a given hash. + _i7.Future> multiRequestStatusFor(List<_i2.H256> keys, {_i1.BlockHash? 
at}) async { + final hashedKeys = keys.map((key) => _requestStatusFor.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _requestStatusFor.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + _i7.Future?>> multiPreimageFor(List<_i5.Tuple2<_i2.H256, int>> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _preimageFor.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _preimageFor.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Returns the storage key for `statusFor`. + _i8.Uint8List statusForKey(_i2.H256 key1) { + final hashedKey = _statusFor.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `requestStatusFor`. + _i8.Uint8List requestStatusForKey(_i2.H256 key1) { + final hashedKey = _requestStatusFor.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `preimageFor`. + _i8.Uint8List preimageForKey(_i5.Tuple2<_i2.H256, int> key1) { + final hashedKey = _preimageFor.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `statusFor`. + _i8.Uint8List statusForMapPrefix() { + final hashedKey = _statusFor.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `requestStatusFor`. + _i8.Uint8List requestStatusForMapPrefix() { + final hashedKey = _requestStatusFor.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `preimageFor`. + _i8.Uint8List preimageForMapPrefix() { + final hashedKey = _preimageFor.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Register a preimage on-chain. + /// + /// If the preimage was previously requested, no fees or deposits are taken for providing + /// the preimage. 
Otherwise, a deposit is taken proportional to the size of the preimage. + _i9.Preimage notePreimage({required List bytes}) { + return _i9.Preimage(_i10.NotePreimage(bytes: bytes)); + } + + /// Clear an unrequested preimage from the runtime storage. + /// + /// If `len` is provided, then it will be a much cheaper operation. + /// + /// - `hash`: The hash of the preimage to be removed from the store. + /// - `len`: The length of the preimage of `hash`. + _i9.Preimage unnotePreimage({required _i2.H256 hash}) { + return _i9.Preimage(_i10.UnnotePreimage(hash: hash)); + } + + /// Request a preimage be uploaded to the chain without paying any fees or deposits. + /// + /// If the preimage requests has already been provided on-chain, we unreserve any deposit + /// a user may have paid, and take the control of the preimage out of their hands. + _i9.Preimage requestPreimage({required _i2.H256 hash}) { + return _i9.Preimage(_i10.RequestPreimage(hash: hash)); + } + + /// Clear a previously made request for a preimage. + /// + /// NOTE: THIS MUST NOT BE CALLED ON `hash` MORE TIMES THAN `request_preimage`. + _i9.Preimage unrequestPreimage({required _i2.H256 hash}) { + return _i9.Preimage(_i10.UnrequestPreimage(hash: hash)); + } + + /// Ensure that the bulk of pre-images is upgraded. + /// + /// The caller pays no fee if at least 90% of pre-images were successfully updated. 
+ _i9.Preimage ensureUpdated({required List<_i2.H256> hashes}) { + return _i9.Preimage(_i10.EnsureUpdated(hashes: hashes)); + } +} diff --git a/quantus_sdk/lib/generated/planck/pallets/q_po_w.dart b/quantus_sdk/lib/generated/planck/pallets/q_po_w.dart new file mode 100644 index 00000000..7b1dd91d --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/q_po_w.dart @@ -0,0 +1,148 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i4; +import 'dart:typed_data' as _i5; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i2; + +import '../types/primitive_types/u512.dart' as _i3; +import '../types/sp_arithmetic/fixed_point/fixed_u128.dart' as _i6; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageValue _lastBlockTime = const _i1.StorageValue( + prefix: 'QPoW', + storage: 'LastBlockTime', + valueCodec: _i2.U64Codec.codec, + ); + + final _i1.StorageValue _lastBlockDuration = const _i1.StorageValue( + prefix: 'QPoW', + storage: 'LastBlockDuration', + valueCodec: _i2.U64Codec.codec, + ); + + final _i1.StorageValue<_i3.U512> _currentDifficulty = const _i1.StorageValue<_i3.U512>( + prefix: 'QPoW', + storage: 'CurrentDifficulty', + valueCodec: _i3.U512Codec(), + ); + + final _i1.StorageValue<_i3.U512> _totalWork = const _i1.StorageValue<_i3.U512>( + prefix: 'QPoW', + storage: 'TotalWork', + valueCodec: _i3.U512Codec(), + ); + + final _i1.StorageValue _blockTimeEma = const _i1.StorageValue( + prefix: 'QPoW', + storage: 'BlockTimeEma', + valueCodec: _i2.U64Codec.codec, + ); + + _i4.Future lastBlockTime({_i1.BlockHash? at}) async { + final hashedKey = _lastBlockTime.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _lastBlockTime.decodeValue(bytes); + } + return BigInt.zero; /* Default */ + } + + _i4.Future lastBlockDuration({_i1.BlockHash? 
at}) async { + final hashedKey = _lastBlockDuration.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _lastBlockDuration.decodeValue(bytes); + } + return BigInt.zero; /* Default */ + } + + _i4.Future<_i3.U512> currentDifficulty({_i1.BlockHash? at}) async { + final hashedKey = _currentDifficulty.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _currentDifficulty.decodeValue(bytes); + } + return List.filled(8, BigInt.zero, growable: false); /* Default */ + } + + _i4.Future<_i3.U512> totalWork({_i1.BlockHash? at}) async { + final hashedKey = _totalWork.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _totalWork.decodeValue(bytes); + } + return List.filled(8, BigInt.zero, growable: false); /* Default */ + } + + _i4.Future blockTimeEma({_i1.BlockHash? at}) async { + final hashedKey = _blockTimeEma.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _blockTimeEma.decodeValue(bytes); + } + return BigInt.zero; /* Default */ + } + + /// Returns the storage key for `lastBlockTime`. + _i5.Uint8List lastBlockTimeKey() { + final hashedKey = _lastBlockTime.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `lastBlockDuration`. + _i5.Uint8List lastBlockDurationKey() { + final hashedKey = _lastBlockDuration.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `currentDifficulty`. + _i5.Uint8List currentDifficultyKey() { + final hashedKey = _currentDifficulty.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `totalWork`. + _i5.Uint8List totalWorkKey() { + final hashedKey = _totalWork.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `blockTimeEma`. 
+ _i5.Uint8List blockTimeEmaKey() { + final hashedKey = _blockTimeEma.hashedKey(); + return hashedKey; + } +} + +class Constants { + Constants(); + + /// Pallet's weight info + final _i3.U512 initialDifficulty = [ + BigInt.from(1189189), + BigInt.from(0), + BigInt.from(0), + BigInt.from(0), + BigInt.from(0), + BigInt.from(0), + BigInt.from(0), + BigInt.from(0), + ]; + + final _i6.FixedU128 difficultyAdjustPercentClamp = BigInt.parse('100000000000000000', radix: 10); + + final BigInt targetBlockTime = BigInt.from(12000); + + /// EMA smoothing factor (0-1000, where 1000 = 1.0) + final int emaAlpha = 100; + + final int maxReorgDepth = 180; + + /// Fixed point scale for calculations (default: 10^18) + final BigInt fixedU128Scale = BigInt.parse('1000000000000000000', radix: 10); +} diff --git a/quantus_sdk/lib/generated/planck/pallets/recovery.dart b/quantus_sdk/lib/generated/planck/pallets/recovery.dart new file mode 100644 index 00000000..4e0e8134 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/recovery.dart @@ -0,0 +1,338 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i5; +import 'dart:typed_data' as _i6; + +import 'package:polkadart/polkadart.dart' as _i1; + +import '../types/pallet_recovery/active_recovery.dart' as _i4; +import '../types/pallet_recovery/pallet/call.dart' as _i9; +import '../types/pallet_recovery/recovery_config.dart' as _i3; +import '../types/quantus_runtime/runtime_call.dart' as _i7; +import '../types/sp_core/crypto/account_id32.dart' as _i2; +import '../types/sp_runtime/multiaddress/multi_address.dart' as _i8; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageMap<_i2.AccountId32, _i3.RecoveryConfig> _recoverable = + const _i1.StorageMap<_i2.AccountId32, _i3.RecoveryConfig>( + prefix: 'Recovery', + storage: 'Recoverable', + valueCodec: _i3.RecoveryConfig.codec, + hasher: _i1.StorageHasher.twoxx64Concat(_i2.AccountId32Codec()), + ); + + final 
_i1.StorageDoubleMap<_i2.AccountId32, _i2.AccountId32, _i4.ActiveRecovery> _activeRecoveries = + const _i1.StorageDoubleMap<_i2.AccountId32, _i2.AccountId32, _i4.ActiveRecovery>( + prefix: 'Recovery', + storage: 'ActiveRecoveries', + valueCodec: _i4.ActiveRecovery.codec, + hasher1: _i1.StorageHasher.twoxx64Concat(_i2.AccountId32Codec()), + hasher2: _i1.StorageHasher.twoxx64Concat(_i2.AccountId32Codec()), + ); + + final _i1.StorageMap<_i2.AccountId32, _i2.AccountId32> _proxy = + const _i1.StorageMap<_i2.AccountId32, _i2.AccountId32>( + prefix: 'Recovery', + storage: 'Proxy', + valueCodec: _i2.AccountId32Codec(), + hasher: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + ); + + /// The set of recoverable accounts and their recovery configuration. + _i5.Future<_i3.RecoveryConfig?> recoverable(_i2.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _recoverable.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _recoverable.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Active recovery attempts. + /// + /// First account is the account to be recovered, and the second account + /// is the user trying to recover the account. + _i5.Future<_i4.ActiveRecovery?> activeRecoveries( + _i2.AccountId32 key1, + _i2.AccountId32 key2, { + _i1.BlockHash? at, + }) async { + final hashedKey = _activeRecoveries.hashedKeyFor(key1, key2); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _activeRecoveries.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The list of allowed proxy accounts. + /// + /// Map from the user who can access it to the recovered account. + _i5.Future<_i2.AccountId32?> proxy(_i2.AccountId32 key1, {_i1.BlockHash? 
at}) async { + final hashedKey = _proxy.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _proxy.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The set of recoverable accounts and their recovery configuration. + _i5.Future> multiRecoverable(List<_i2.AccountId32> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _recoverable.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _recoverable.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// The list of allowed proxy accounts. + /// + /// Map from the user who can access it to the recovered account. + _i5.Future> multiProxy(List<_i2.AccountId32> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _proxy.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _proxy.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Returns the storage key for `recoverable`. + _i6.Uint8List recoverableKey(_i2.AccountId32 key1) { + final hashedKey = _recoverable.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `activeRecoveries`. + _i6.Uint8List activeRecoveriesKey(_i2.AccountId32 key1, _i2.AccountId32 key2) { + final hashedKey = _activeRecoveries.hashedKeyFor(key1, key2); + return hashedKey; + } + + /// Returns the storage key for `proxy`. + _i6.Uint8List proxyKey(_i2.AccountId32 key1) { + final hashedKey = _proxy.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `recoverable`. + _i6.Uint8List recoverableMapPrefix() { + final hashedKey = _recoverable.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `activeRecoveries`. 
+ _i6.Uint8List activeRecoveriesMapPrefix(_i2.AccountId32 key1) { + final hashedKey = _activeRecoveries.mapPrefix(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `proxy`. + _i6.Uint8List proxyMapPrefix() { + final hashedKey = _proxy.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Send a call through a recovered account. + /// + /// The dispatch origin for this call must be _Signed_ and registered to + /// be able to make calls on behalf of the recovered account. + /// + /// Parameters: + /// - `account`: The recovered account you want to make a call on-behalf-of. + /// - `call`: The call you want to make with the recovered account. + _i7.Recovery asRecovered({required _i8.MultiAddress account, required _i7.RuntimeCall call}) { + return _i7.Recovery(_i9.AsRecovered(account: account, call: call)); + } + + /// Allow ROOT to bypass the recovery process and set a rescuer account + /// for a lost account directly. + /// + /// The dispatch origin for this call must be _ROOT_. + /// + /// Parameters: + /// - `lost`: The "lost account" to be recovered. + /// - `rescuer`: The "rescuer account" which can call as the lost account. + _i7.Recovery setRecovered({required _i8.MultiAddress lost, required _i8.MultiAddress rescuer}) { + return _i7.Recovery(_i9.SetRecovered(lost: lost, rescuer: rescuer)); + } + + /// Create a recovery configuration for your account. This makes your account recoverable. + /// + /// Payment: `ConfigDepositBase` + `FriendDepositFactor` * #_of_friends balance + /// will be reserved for storing the recovery configuration. This deposit is returned + /// in full when the user calls `remove_recovery`. + /// + /// The dispatch origin for this call must be _Signed_. + /// + /// Parameters: + /// - `friends`: A list of friends you trust to vouch for recovery attempts. Should be + /// ordered and contain no duplicate values. 
+ /// - `threshold`: The number of friends that must vouch for a recovery attempt before the + /// account can be recovered. Should be less than or equal to the length of the list of + /// friends. + /// - `delay_period`: The number of blocks after a recovery attempt is initialized that + /// needs to pass before the account can be recovered. + _i7.Recovery createRecovery({ + required List<_i2.AccountId32> friends, + required int threshold, + required int delayPeriod, + }) { + return _i7.Recovery(_i9.CreateRecovery(friends: friends, threshold: threshold, delayPeriod: delayPeriod)); + } + + /// Initiate the process for recovering a recoverable account. + /// + /// Payment: `RecoveryDeposit` balance will be reserved for initiating the + /// recovery process. This deposit will always be repatriated to the account + /// trying to be recovered. See `close_recovery`. + /// + /// The dispatch origin for this call must be _Signed_. + /// + /// Parameters: + /// - `account`: The lost account that you want to recover. This account needs to be + /// recoverable (i.e. have a recovery configuration). + _i7.Recovery initiateRecovery({required _i8.MultiAddress account}) { + return _i7.Recovery(_i9.InitiateRecovery(account: account)); + } + + /// Allow a "friend" of a recoverable account to vouch for an active recovery + /// process for that account. + /// + /// The dispatch origin for this call must be _Signed_ and must be a "friend" + /// for the recoverable account. + /// + /// Parameters: + /// - `lost`: The lost account that you want to recover. + /// - `rescuer`: The account trying to rescue the lost account that you want to vouch for. + /// + /// The combination of these two parameters must point to an active recovery + /// process. + _i7.Recovery vouchRecovery({required _i8.MultiAddress lost, required _i8.MultiAddress rescuer}) { + return _i7.Recovery(_i9.VouchRecovery(lost: lost, rescuer: rescuer)); + } + + /// Allow a successful rescuer to claim their recovered account. 
+ /// + /// The dispatch origin for this call must be _Signed_ and must be a "rescuer" + /// who has successfully completed the account recovery process: collected + /// `threshold` or more vouches, waited `delay_period` blocks since initiation. + /// + /// Parameters: + /// - `account`: The lost account that you want to claim has been successfully recovered by + /// you. + _i7.Recovery claimRecovery({required _i8.MultiAddress account}) { + return _i7.Recovery(_i9.ClaimRecovery(account: account)); + } + + /// As the controller of a recoverable account, close an active recovery + /// process for your account. + /// + /// Payment: By calling this function, the recoverable account will receive + /// the recovery deposit `RecoveryDeposit` placed by the rescuer. + /// + /// The dispatch origin for this call must be _Signed_ and must be a + /// recoverable account with an active recovery process for it. + /// + /// Parameters: + /// - `rescuer`: The account trying to rescue this recoverable account. + _i7.Recovery closeRecovery({required _i8.MultiAddress rescuer}) { + return _i7.Recovery(_i9.CloseRecovery(rescuer: rescuer)); + } + + /// Remove the recovery process for your account. Recovered accounts are still accessible. + /// + /// NOTE: The user must make sure to call `close_recovery` on all active + /// recovery attempts before calling this function else it will fail. + /// + /// Payment: By calling this function the recoverable account will unreserve + /// their recovery configuration deposit. + /// (`ConfigDepositBase` + `FriendDepositFactor` * #_of_friends) + /// + /// The dispatch origin for this call must be _Signed_ and must be a + /// recoverable account (i.e. has a recovery configuration). + _i7.Recovery removeRecovery() { + return _i7.Recovery(_i9.RemoveRecovery()); + } + + /// Cancel the ability to use `as_recovered` for `account`. 
+ /// + /// The dispatch origin for this call must be _Signed_ and registered to + /// be able to make calls on behalf of the recovered account. + /// + /// Parameters: + /// - `account`: The recovered account you are able to call on-behalf-of. + _i7.Recovery cancelRecovered({required _i8.MultiAddress account}) { + return _i7.Recovery(_i9.CancelRecovered(account: account)); + } + + /// Poke deposits for recovery configurations and / or active recoveries. + /// + /// This can be used by accounts to possibly lower their locked amount. + /// + /// The dispatch origin for this call must be _Signed_. + /// + /// Parameters: + /// - `maybe_account`: Optional recoverable account for which you have an active recovery + /// and want to adjust the deposit for the active recovery. + /// + /// This function checks both recovery configuration deposit and active recovery deposits + /// of the caller: + /// - If the caller has created a recovery configuration, checks and adjusts its deposit + /// - If the caller has initiated any active recoveries, and provides the account in + /// `maybe_account`, checks and adjusts those deposits + /// + /// If any deposit is updated, the difference will be reserved/unreserved from the caller's + /// account. + /// + /// The transaction is made free if any deposit is updated and paid otherwise. + /// + /// Emits `DepositPoked` if any deposit is updated. + /// Multiple events may be emitted in case both types of deposits are updated. + _i7.Recovery pokeDeposit({_i8.MultiAddress? maybeAccount}) { + return _i7.Recovery(_i9.PokeDeposit(maybeAccount: maybeAccount)); + } +} + +class Constants { + Constants(); + + /// The base amount of currency needed to reserve for creating a recovery configuration. + /// + /// This is held for an additional storage item whose value size is + /// `2 + sizeof(BlockNumber, Balance)` bytes. 
+ final BigInt configDepositBase = BigInt.from(10000000000000); + + /// The amount of currency needed per additional user when creating a recovery + /// configuration. + /// + /// This is held for adding `sizeof(AccountId)` bytes more into a pre-existing storage + /// value. + final BigInt friendDepositFactor = BigInt.from(1000000000000); + + /// The maximum amount of friends allowed in a recovery configuration. + /// + /// NOTE: The threshold programmed in this Pallet uses u16, so it does + /// not really make sense to have a limit here greater than u16::MAX. + /// But also, that is a lot more than you should probably set this value + /// to anyway... + final int maxFriends = 9; + + /// The base amount of currency needed to reserve for starting a recovery. + /// + /// This is primarily held for deterring malicious recovery attempts, and should + /// have a value large enough that a bad actor would choose not to place this + /// deposit. It also acts to fund additional storage item whose value size is + /// `sizeof(BlockNumber, Balance + T * AccountId)` bytes. Where T is a configurable + /// threshold. 
+ final BigInt recoveryDeposit = BigInt.from(10000000000000); +} diff --git a/quantus_sdk/lib/generated/planck/pallets/referenda.dart b/quantus_sdk/lib/generated/planck/pallets/referenda.dart new file mode 100644 index 00000000..1ef60a18 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/referenda.dart @@ -0,0 +1,382 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i6; +import 'dart:typed_data' as _i7; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i2; + +import '../types/frame_support/traits/preimages/bounded.dart' as _i10; +import '../types/frame_support/traits/schedule/dispatch_time.dart' as _i11; +import '../types/pallet_referenda/pallet/call_1.dart' as _i12; +import '../types/pallet_referenda/types/curve.dart' as _i14; +import '../types/pallet_referenda/types/referendum_info_1.dart' as _i3; +import '../types/pallet_referenda/types/track_details.dart' as _i13; +import '../types/primitive_types/h256.dart' as _i5; +import '../types/quantus_runtime/origin_caller.dart' as _i9; +import '../types/quantus_runtime/runtime_call.dart' as _i8; +import '../types/tuples.dart' as _i4; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageValue _referendumCount = const _i1.StorageValue( + prefix: 'Referenda', + storage: 'ReferendumCount', + valueCodec: _i2.U32Codec.codec, + ); + + final _i1.StorageMap _referendumInfoFor = const _i1.StorageMap( + prefix: 'Referenda', + storage: 'ReferendumInfoFor', + valueCodec: _i3.ReferendumInfo.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i2.U32Codec.codec), + ); + + final _i1.StorageMap>> _trackQueue = + const _i1.StorageMap>>( + prefix: 'Referenda', + storage: 'TrackQueue', + valueCodec: _i2.SequenceCodec<_i4.Tuple2>( + _i4.Tuple2Codec(_i2.U32Codec.codec, _i2.U128Codec.codec), + ), + hasher: _i1.StorageHasher.twoxx64Concat(_i2.U16Codec.codec), + ); + + final _i1.StorageMap 
_decidingCount = const _i1.StorageMap( + prefix: 'Referenda', + storage: 'DecidingCount', + valueCodec: _i2.U32Codec.codec, + hasher: _i1.StorageHasher.twoxx64Concat(_i2.U16Codec.codec), + ); + + final _i1.StorageMap _metadataOf = const _i1.StorageMap( + prefix: 'Referenda', + storage: 'MetadataOf', + valueCodec: _i5.H256Codec(), + hasher: _i1.StorageHasher.blake2b128Concat(_i2.U32Codec.codec), + ); + + /// The next free referendum index, aka the number of referenda started so far. + _i6.Future referendumCount({_i1.BlockHash? at}) async { + final hashedKey = _referendumCount.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _referendumCount.decodeValue(bytes); + } + return 0; /* Default */ + } + + /// Information concerning any given referendum. + _i6.Future<_i3.ReferendumInfo?> referendumInfoFor(int key1, {_i1.BlockHash? at}) async { + final hashedKey = _referendumInfoFor.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _referendumInfoFor.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The sorted list of referenda ready to be decided but not yet being decided, ordered by + /// conviction-weighted approvals. + /// + /// This should be empty if `DecidingCount` is less than `TrackInfo::max_deciding`. + _i6.Future>> trackQueue(int key1, {_i1.BlockHash? at}) async { + final hashedKey = _trackQueue.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _trackQueue.decodeValue(bytes); + } + return []; /* Default */ + } + + /// The number of referenda being decided currently. + _i6.Future decidingCount(int key1, {_i1.BlockHash? 
at}) async { + final hashedKey = _decidingCount.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _decidingCount.decodeValue(bytes); + } + return 0; /* Default */ + } + + /// The metadata is a general information concerning the referendum. + /// The `Hash` refers to the preimage of the `Preimages` provider which can be a JSON + /// dump or IPFS hash of a JSON file. + /// + /// Consider a garbage collection for a metadata of finished referendums to `unrequest` (remove) + /// large preimages. + _i6.Future<_i5.H256?> metadataOf(int key1, {_i1.BlockHash? at}) async { + final hashedKey = _metadataOf.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _metadataOf.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Information concerning any given referendum. + _i6.Future> multiReferendumInfoFor(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _referendumInfoFor.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _referendumInfoFor.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// The sorted list of referenda ready to be decided but not yet being decided, ordered by + /// conviction-weighted approvals. + /// + /// This should be empty if `DecidingCount` is less than `TrackInfo::max_deciding`. + _i6.Future>>> multiTrackQueue(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _trackQueue.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _trackQueue.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>>); /* Default */ + } + + /// The number of referenda being decided currently. 
+ _i6.Future> multiDecidingCount(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _decidingCount.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _decidingCount.decodeValue(v.key)).toList(); + } + return (keys.map((key) => 0).toList() as List); /* Default */ + } + + /// The metadata is a general information concerning the referendum. + /// The `Hash` refers to the preimage of the `Preimages` provider which can be a JSON + /// dump or IPFS hash of a JSON file. + /// + /// Consider a garbage collection for a metadata of finished referendums to `unrequest` (remove) + /// large preimages. + _i6.Future> multiMetadataOf(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _metadataOf.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _metadataOf.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Returns the storage key for `referendumCount`. + _i7.Uint8List referendumCountKey() { + final hashedKey = _referendumCount.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `referendumInfoFor`. + _i7.Uint8List referendumInfoForKey(int key1) { + final hashedKey = _referendumInfoFor.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `trackQueue`. + _i7.Uint8List trackQueueKey(int key1) { + final hashedKey = _trackQueue.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `decidingCount`. + _i7.Uint8List decidingCountKey(int key1) { + final hashedKey = _decidingCount.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `metadataOf`. 
+ _i7.Uint8List metadataOfKey(int key1) { + final hashedKey = _metadataOf.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `referendumInfoFor`. + _i7.Uint8List referendumInfoForMapPrefix() { + final hashedKey = _referendumInfoFor.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `trackQueue`. + _i7.Uint8List trackQueueMapPrefix() { + final hashedKey = _trackQueue.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `decidingCount`. + _i7.Uint8List decidingCountMapPrefix() { + final hashedKey = _decidingCount.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `metadataOf`. + _i7.Uint8List metadataOfMapPrefix() { + final hashedKey = _metadataOf.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Propose a referendum on a privileged action. + /// + /// - `origin`: must be `SubmitOrigin` and the account must have `SubmissionDeposit` funds + /// available. + /// - `proposal_origin`: The origin from which the proposal should be executed. + /// - `proposal`: The proposal. + /// - `enactment_moment`: The moment that the proposal should be enacted. + /// + /// Emits `Submitted`. + _i8.Referenda submit({ + required _i9.OriginCaller proposalOrigin, + required _i10.Bounded proposal, + required _i11.DispatchTime enactmentMoment, + }) { + return _i8.Referenda( + _i12.Submit(proposalOrigin: proposalOrigin, proposal: proposal, enactmentMoment: enactmentMoment), + ); + } + + /// Post the Decision Deposit for a referendum. + /// + /// - `origin`: must be `Signed` and the account must have funds available for the + /// referendum's track's Decision Deposit. + /// - `index`: The index of the submitted referendum whose Decision Deposit is yet to be + /// posted. + /// + /// Emits `DecisionDepositPlaced`. 
+ _i8.Referenda placeDecisionDeposit({required int index}) { + return _i8.Referenda(_i12.PlaceDecisionDeposit(index: index)); + } + + /// Refund the Decision Deposit for a closed referendum back to the depositor. + /// + /// - `origin`: must be `Signed` or `Root`. + /// - `index`: The index of a closed referendum whose Decision Deposit has not yet been + /// refunded. + /// + /// Emits `DecisionDepositRefunded`. + _i8.Referenda refundDecisionDeposit({required int index}) { + return _i8.Referenda(_i12.RefundDecisionDeposit(index: index)); + } + + /// Cancel an ongoing referendum. + /// + /// - `origin`: must be the `CancelOrigin`. + /// - `index`: The index of the referendum to be cancelled. + /// + /// Emits `Cancelled`. + _i8.Referenda cancel({required int index}) { + return _i8.Referenda(_i12.Cancel(index: index)); + } + + /// Cancel an ongoing referendum and slash the deposits. + /// + /// - `origin`: must be the `KillOrigin`. + /// - `index`: The index of the referendum to be cancelled. + /// + /// Emits `Killed` and `DepositSlashed`. + _i8.Referenda kill({required int index}) { + return _i8.Referenda(_i12.Kill(index: index)); + } + + /// Advance a referendum onto its next logical state. Only used internally. + /// + /// - `origin`: must be `Root`. + /// - `index`: the referendum to be advanced. + _i8.Referenda nudgeReferendum({required int index}) { + return _i8.Referenda(_i12.NudgeReferendum(index: index)); + } + + /// Advance a track onto its next logical state. Only used internally. + /// + /// - `origin`: must be `Root`. + /// - `track`: the track to be advanced. + /// + /// Action item for when there is now one fewer referendum in the deciding phase and the + /// `DecidingCount` is not yet updated. This means that we should either: + /// - begin deciding another referendum (and leave `DecidingCount` alone); or + /// - decrement `DecidingCount`. 
+ _i8.Referenda oneFewerDeciding({required int track}) { + return _i8.Referenda(_i12.OneFewerDeciding(track: track)); + } + + /// Refund the Submission Deposit for a closed referendum back to the depositor. + /// + /// - `origin`: must be `Signed` or `Root`. + /// - `index`: The index of a closed referendum whose Submission Deposit has not yet been + /// refunded. + /// + /// Emits `SubmissionDepositRefunded`. + _i8.Referenda refundSubmissionDeposit({required int index}) { + return _i8.Referenda(_i12.RefundSubmissionDeposit(index: index)); + } + + /// Set or clear metadata of a referendum. + /// + /// Parameters: + /// - `origin`: Must be `Signed` by a creator of a referendum or by anyone to clear a + /// metadata of a finished referendum. + /// - `index`: The index of a referendum to set or clear metadata for. + /// - `maybe_hash`: The hash of an on-chain stored preimage. `None` to clear a metadata. + _i8.Referenda setMetadata({required int index, _i5.H256? maybeHash}) { + return _i8.Referenda(_i12.SetMetadata(index: index, maybeHash: maybeHash)); + } +} + +class Constants { + Constants(); + + /// The minimum amount to be used as a deposit for a public referendum proposal. + final BigInt submissionDeposit = BigInt.from(100000000000000); + + /// Maximum size of the referendum queue for a single track. + final int maxQueued = 100; + + /// The number of blocks after submission that a referendum must begin being decided by. + /// Once this passes, then anyone may cancel the referendum. + final int undecidingTimeout = 324000; + + /// Quantization level for the referendum wakeup scheduler. A higher number will result in + /// fewer storage reads/writes needed for smaller voters, but also result in delays to the + /// automatic referendum status changes. Explicit servicing instructions are unaffected. + final int alarmInterval = 1; + + /// A list of tracks. + /// + /// Note: if the tracks are dynamic, the value in the static metadata might be inaccurate. 
+ final List<_i4.Tuple2> tracks = [ + _i4.Tuple2( + 0, + _i13.TrackDetails( + name: 'signed', + maxDeciding: 5, + decisionDeposit: BigInt.from(500000000000000), + preparePeriod: 3600, + decisionPeriod: 50400, + confirmPeriod: 3600, + minEnactmentPeriod: 7200, + minApproval: const _i14.LinearDecreasing(length: 1000000000, floor: 550000000, ceil: 700000000), + minSupport: const _i14.LinearDecreasing(length: 1000000000, floor: 50000000, ceil: 250000000), + ), + ), + _i4.Tuple2( + 1, + _i13.TrackDetails( + name: 'signaling', + maxDeciding: 20, + decisionDeposit: BigInt.from(100000000000000), + preparePeriod: 1800, + decisionPeriod: 36000, + confirmPeriod: 900, + minEnactmentPeriod: 1, + minApproval: const _i14.LinearDecreasing(length: 1000000000, floor: 500000000, ceil: 600000000), + minSupport: const _i14.LinearDecreasing(length: 1000000000, floor: 10000000, ceil: 100000000), + ), + ), + ]; +} diff --git a/quantus_sdk/lib/generated/planck/pallets/reversible_transfers.dart b/quantus_sdk/lib/generated/planck/pallets/reversible_transfers.dart new file mode 100644 index 00000000..c417f9d8 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/reversible_transfers.dart @@ -0,0 +1,419 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i7; +import 'dart:typed_data' as _i8; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i6; + +import '../types/pallet_reversible_transfers/high_security_account_data.dart' as _i3; +import '../types/pallet_reversible_transfers/pallet/call.dart' as _i11; +import '../types/pallet_reversible_transfers/pending_transfer.dart' as _i5; +import '../types/primitive_types/h256.dart' as _i4; +import '../types/qp_scheduler/block_number_or_timestamp.dart' as _i10; +import '../types/quantus_runtime/runtime_call.dart' as _i9; +import '../types/sp_arithmetic/per_things/permill.dart' as _i13; +import '../types/sp_core/crypto/account_id32.dart' as _i2; +import 
'../types/sp_runtime/multiaddress/multi_address.dart' as _i12; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageMap<_i2.AccountId32, _i3.HighSecurityAccountData> _highSecurityAccounts = + const _i1.StorageMap<_i2.AccountId32, _i3.HighSecurityAccountData>( + prefix: 'ReversibleTransfers', + storage: 'HighSecurityAccounts', + valueCodec: _i3.HighSecurityAccountData.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + ); + + final _i1.StorageMap<_i4.H256, _i5.PendingTransfer> _pendingTransfers = + const _i1.StorageMap<_i4.H256, _i5.PendingTransfer>( + prefix: 'ReversibleTransfers', + storage: 'PendingTransfers', + valueCodec: _i5.PendingTransfer.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i4.H256Codec()), + ); + + final _i1.StorageMap<_i2.AccountId32, int> _accountPendingIndex = const _i1.StorageMap<_i2.AccountId32, int>( + prefix: 'ReversibleTransfers', + storage: 'AccountPendingIndex', + valueCodec: _i6.U32Codec.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + ); + + final _i1.StorageMap<_i2.AccountId32, List<_i4.H256>> _pendingTransfersBySender = + const _i1.StorageMap<_i2.AccountId32, List<_i4.H256>>( + prefix: 'ReversibleTransfers', + storage: 'PendingTransfersBySender', + valueCodec: _i6.SequenceCodec<_i4.H256>(_i4.H256Codec()), + hasher: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + ); + + final _i1.StorageMap<_i2.AccountId32, List<_i4.H256>> _pendingTransfersByRecipient = + const _i1.StorageMap<_i2.AccountId32, List<_i4.H256>>( + prefix: 'ReversibleTransfers', + storage: 'PendingTransfersByRecipient', + valueCodec: _i6.SequenceCodec<_i4.H256>(_i4.H256Codec()), + hasher: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + ); + + final _i1.StorageMap<_i2.AccountId32, List<_i2.AccountId32>> _interceptorIndex = + const _i1.StorageMap<_i2.AccountId32, List<_i2.AccountId32>>( + prefix: 'ReversibleTransfers', + storage: 
'InterceptorIndex', + valueCodec: _i6.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()), + hasher: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + ); + + final _i1.StorageValue _globalNonce = const _i1.StorageValue( + prefix: 'ReversibleTransfers', + storage: 'GlobalNonce', + valueCodec: _i6.U64Codec.codec, + ); + + /// Maps accounts to their chosen reversibility delay period (in milliseconds). + /// Accounts present in this map have reversibility enabled. + _i7.Future<_i3.HighSecurityAccountData?> highSecurityAccounts(_i2.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _highSecurityAccounts.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _highSecurityAccounts.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Stores the details of pending transactions scheduled for delayed execution. + /// Keyed by the unique transaction ID. + _i7.Future<_i5.PendingTransfer?> pendingTransfers(_i4.H256 key1, {_i1.BlockHash? at}) async { + final hashedKey = _pendingTransfers.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _pendingTransfers.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Indexes pending transaction IDs per account for efficient lookup and cancellation. + /// Also enforces the maximum pending transactions limit per account. + _i7.Future accountPendingIndex(_i2.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _accountPendingIndex.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _accountPendingIndex.decodeValue(bytes); + } + return 0; /* Default */ + } + + /// Maps sender accounts to their list of pending transaction IDs. + /// This allows users to query all their outgoing pending transfers. + _i7.Future> pendingTransfersBySender(_i2.AccountId32 key1, {_i1.BlockHash? 
at}) async { + final hashedKey = _pendingTransfersBySender.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _pendingTransfersBySender.decodeValue(bytes); + } + return []; /* Default */ + } + + /// Maps recipient accounts to their list of pending incoming transaction IDs. + /// This allows users to query all their incoming pending transfers. + _i7.Future> pendingTransfersByRecipient(_i2.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _pendingTransfersByRecipient.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _pendingTransfersByRecipient.decodeValue(bytes); + } + return []; /* Default */ + } + + /// Maps interceptor accounts to the list of accounts they can intercept for. + /// This allows the UI to efficiently query all accounts for which a given account is an + /// interceptor. + _i7.Future> interceptorIndex(_i2.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _interceptorIndex.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _interceptorIndex.decodeValue(bytes); + } + return []; /* Default */ + } + + /// Global nonce for generating unique transaction IDs. + _i7.Future globalNonce({_i1.BlockHash? at}) async { + final hashedKey = _globalNonce.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _globalNonce.decodeValue(bytes); + } + return BigInt.zero; /* Default */ + } + + /// Maps accounts to their chosen reversibility delay period (in milliseconds). + /// Accounts present in this map have reversibility enabled. + _i7.Future> multiHighSecurityAccounts( + List<_i2.AccountId32> keys, { + _i1.BlockHash? 
at, + }) async { + final hashedKeys = keys.map((key) => _highSecurityAccounts.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _highSecurityAccounts.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Stores the details of pending transactions scheduled for delayed execution. + /// Keyed by the unique transaction ID. + _i7.Future> multiPendingTransfers(List<_i4.H256> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _pendingTransfers.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _pendingTransfers.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Indexes pending transaction IDs per account for efficient lookup and cancellation. + /// Also enforces the maximum pending transactions limit per account. + _i7.Future> multiAccountPendingIndex(List<_i2.AccountId32> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _accountPendingIndex.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _accountPendingIndex.decodeValue(v.key)).toList(); + } + return (keys.map((key) => 0).toList() as List); /* Default */ + } + + /// Maps sender accounts to their list of pending transaction IDs. + /// This allows users to query all their outgoing pending transfers. + _i7.Future>> multiPendingTransfersBySender( + List<_i2.AccountId32> keys, { + _i1.BlockHash? 
at, + }) async { + final hashedKeys = keys.map((key) => _pendingTransfersBySender.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _pendingTransfersBySender.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>); /* Default */ + } + + /// Maps recipient accounts to their list of pending incoming transaction IDs. + /// This allows users to query all their incoming pending transfers. + _i7.Future>> multiPendingTransfersByRecipient( + List<_i2.AccountId32> keys, { + _i1.BlockHash? at, + }) async { + final hashedKeys = keys.map((key) => _pendingTransfersByRecipient.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _pendingTransfersByRecipient.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>); /* Default */ + } + + /// Maps interceptor accounts to the list of accounts they can intercept for. + /// This allows the UI to efficiently query all accounts for which a given account is an + /// interceptor. + _i7.Future>> multiInterceptorIndex(List<_i2.AccountId32> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _interceptorIndex.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _interceptorIndex.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>); /* Default */ + } + + /// Returns the storage key for `highSecurityAccounts`. + _i8.Uint8List highSecurityAccountsKey(_i2.AccountId32 key1) { + final hashedKey = _highSecurityAccounts.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `pendingTransfers`. 
+ _i8.Uint8List pendingTransfersKey(_i4.H256 key1) { + final hashedKey = _pendingTransfers.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `accountPendingIndex`. + _i8.Uint8List accountPendingIndexKey(_i2.AccountId32 key1) { + final hashedKey = _accountPendingIndex.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `pendingTransfersBySender`. + _i8.Uint8List pendingTransfersBySenderKey(_i2.AccountId32 key1) { + final hashedKey = _pendingTransfersBySender.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `pendingTransfersByRecipient`. + _i8.Uint8List pendingTransfersByRecipientKey(_i2.AccountId32 key1) { + final hashedKey = _pendingTransfersByRecipient.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `interceptorIndex`. + _i8.Uint8List interceptorIndexKey(_i2.AccountId32 key1) { + final hashedKey = _interceptorIndex.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `globalNonce`. + _i8.Uint8List globalNonceKey() { + final hashedKey = _globalNonce.hashedKey(); + return hashedKey; + } + + /// Returns the storage map key prefix for `highSecurityAccounts`. + _i8.Uint8List highSecurityAccountsMapPrefix() { + final hashedKey = _highSecurityAccounts.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `pendingTransfers`. + _i8.Uint8List pendingTransfersMapPrefix() { + final hashedKey = _pendingTransfers.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `accountPendingIndex`. + _i8.Uint8List accountPendingIndexMapPrefix() { + final hashedKey = _accountPendingIndex.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `pendingTransfersBySender`. 
+ _i8.Uint8List pendingTransfersBySenderMapPrefix() { + final hashedKey = _pendingTransfersBySender.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `pendingTransfersByRecipient`. + _i8.Uint8List pendingTransfersByRecipientMapPrefix() { + final hashedKey = _pendingTransfersByRecipient.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `interceptorIndex`. + _i8.Uint8List interceptorIndexMapPrefix() { + final hashedKey = _interceptorIndex.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Enable high-security for the calling account with a specified + /// reversibility delay. + /// + /// Recoverer and interceptor (aka guardian) could be the same account or + /// different accounts. + /// + /// Once an account is set as high security it can only make reversible + /// transfers. It is not allowed any other calls. + /// + /// - `delay`: The reversibility time for any transfer made by the high + /// security account. + /// - interceptor: The account that can intercept transctions from the + /// high security account. + _i9.ReversibleTransfers setHighSecurity({ + required _i10.BlockNumberOrTimestamp delay, + required _i2.AccountId32 interceptor, + }) { + return _i9.ReversibleTransfers(_i11.SetHighSecurity(delay: delay, interceptor: interceptor)); + } + + /// Cancel a pending reversible transaction scheduled by the caller. + /// + /// - `tx_id`: The unique identifier of the transaction to cancel. + _i9.ReversibleTransfers cancel({required _i4.H256 txId}) { + return _i9.ReversibleTransfers(_i11.Cancel(txId: txId)); + } + + /// Called by the Scheduler to finalize the scheduled task/call + /// + /// - `tx_id`: The unique id of the transaction to finalize and dispatch. + _i9.ReversibleTransfers executeTransfer({required _i4.H256 txId}) { + return _i9.ReversibleTransfers(_i11.ExecuteTransfer(txId: txId)); + } + + /// Schedule a transaction for delayed execution. 
+ _i9.ReversibleTransfers scheduleTransfer({required _i12.MultiAddress dest, required BigInt amount}) { + return _i9.ReversibleTransfers(_i11.ScheduleTransfer(dest: dest, amount: amount)); + } + + /// Schedule a transaction for delayed execution with a custom, one-time delay. + /// + /// This can only be used by accounts that have *not* set up a persistent + /// reversibility configuration with `set_high_security`. + /// + /// - `delay`: The time (in blocks or milliseconds) before the transaction executes. + _i9.ReversibleTransfers scheduleTransferWithDelay({ + required _i12.MultiAddress dest, + required BigInt amount, + required _i10.BlockNumberOrTimestamp delay, + }) { + return _i9.ReversibleTransfers(_i11.ScheduleTransferWithDelay(dest: dest, amount: amount, delay: delay)); + } + + /// Schedule an asset transfer (pallet-assets) for delayed execution using the configured + /// delay. + _i9.ReversibleTransfers scheduleAssetTransfer({ + required int assetId, + required _i12.MultiAddress dest, + required BigInt amount, + }) { + return _i9.ReversibleTransfers(_i11.ScheduleAssetTransfer(assetId: assetId, dest: dest, amount: amount)); + } + + /// Schedule an asset transfer (pallet-assets) with a custom one-time delay. + _i9.ReversibleTransfers scheduleAssetTransferWithDelay({ + required int assetId, + required _i12.MultiAddress dest, + required BigInt amount, + required _i10.BlockNumberOrTimestamp delay, + }) { + return _i9.ReversibleTransfers( + _i11.ScheduleAssetTransferWithDelay(assetId: assetId, dest: dest, amount: amount, delay: delay), + ); + } + + /// Allows the guardian (interceptor) to recover all funds from a high security + /// account by transferring the entire balance to themselves. + /// + /// This is an emergency function for when the high security account may be compromised. 
+ _i9.ReversibleTransfers recoverFunds({required _i2.AccountId32 account}) { + return _i9.ReversibleTransfers(_i11.RecoverFunds(account: account)); + } +} + +class Constants { + Constants(); + + /// Maximum pending reversible transactions allowed per account. Used for BoundedVec. + final int maxPendingPerAccount = 10; + + /// Maximum number of accounts an interceptor can intercept for. Used for BoundedVec. + final int maxInterceptorAccounts = 32; + + /// The default delay period for reversible transactions if none is specified. + /// + /// NOTE: default delay is always in blocks. + final _i10.BlockNumberOrTimestamp defaultDelay = const _i10.BlockNumber(7200); + + /// The minimum delay period allowed for reversible transactions, in blocks. + final int minDelayPeriodBlocks = 2; + + /// The minimum delay period allowed for reversible transactions, in milliseconds. + final BigInt minDelayPeriodMoment = BigInt.from(12000); + + /// Volume fee taken from reversed transactions for high-security accounts only, + /// expressed as a Permill (e.g., Permill::from_percent(1) = 1%). Regular accounts incur no + /// fees. The fee is burned (removed from total issuance). 
+ final _i13.Permill volumeFee = 10000; +} diff --git a/quantus_sdk/lib/generated/planck/pallets/scheduler.dart b/quantus_sdk/lib/generated/planck/pallets/scheduler.dart new file mode 100644 index 00000000..b0b60e5a --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/scheduler.dart @@ -0,0 +1,358 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i7; +import 'dart:typed_data' as _i8; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i2; + +import '../types/pallet_scheduler/pallet/call.dart' as _i10; +import '../types/pallet_scheduler/retry_config.dart' as _i6; +import '../types/pallet_scheduler/scheduled.dart' as _i4; +import '../types/qp_scheduler/block_number_or_timestamp.dart' as _i3; +import '../types/quantus_runtime/runtime_call.dart' as _i9; +import '../types/sp_weights/weight_v2/weight.dart' as _i11; +import '../types/tuples_1.dart' as _i5; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageValue _incompleteBlockSince = const _i1.StorageValue( + prefix: 'Scheduler', + storage: 'IncompleteBlockSince', + valueCodec: _i2.U32Codec.codec, + ); + + final _i1.StorageValue _incompleteTimestampSince = const _i1.StorageValue( + prefix: 'Scheduler', + storage: 'IncompleteTimestampSince', + valueCodec: _i2.U64Codec.codec, + ); + + final _i1.StorageValue _lastProcessedTimestamp = const _i1.StorageValue( + prefix: 'Scheduler', + storage: 'LastProcessedTimestamp', + valueCodec: _i2.U64Codec.codec, + ); + + final _i1.StorageMap<_i3.BlockNumberOrTimestamp, List<_i4.Scheduled?>> _agenda = + const _i1.StorageMap<_i3.BlockNumberOrTimestamp, List<_i4.Scheduled?>>( + prefix: 'Scheduler', + storage: 'Agenda', + valueCodec: _i2.SequenceCodec<_i4.Scheduled?>(_i2.OptionCodec<_i4.Scheduled>(_i4.Scheduled.codec)), + hasher: _i1.StorageHasher.twoxx64Concat(_i3.BlockNumberOrTimestamp.codec), + ); + + final 
_i1.StorageMap<_i5.Tuple2<_i3.BlockNumberOrTimestamp, int>, _i6.RetryConfig> _retries = + const _i1.StorageMap<_i5.Tuple2<_i3.BlockNumberOrTimestamp, int>, _i6.RetryConfig>( + prefix: 'Scheduler', + storage: 'Retries', + valueCodec: _i6.RetryConfig.codec, + hasher: _i1.StorageHasher.blake2b128Concat( + _i5.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>(_i3.BlockNumberOrTimestamp.codec, _i2.U32Codec.codec), + ), + ); + + final _i1.StorageMap, _i5.Tuple2<_i3.BlockNumberOrTimestamp, int>> _lookup = + const _i1.StorageMap, _i5.Tuple2<_i3.BlockNumberOrTimestamp, int>>( + prefix: 'Scheduler', + storage: 'Lookup', + valueCodec: _i5.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i2.U32Codec.codec, + ), + hasher: _i1.StorageHasher.twoxx64Concat(_i2.U8ArrayCodec(32)), + ); + + /// Tracks incomplete block-based agendas that need to be processed in a later block. + _i7.Future incompleteBlockSince({_i1.BlockHash? at}) async { + final hashedKey = _incompleteBlockSince.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _incompleteBlockSince.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Tracks incomplete timestamp-based agendas that need to be processed in a later block. + _i7.Future incompleteTimestampSince({_i1.BlockHash? at}) async { + final hashedKey = _incompleteTimestampSince.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _incompleteTimestampSince.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Tracks the last timestamp bucket that was fully processed. + /// Used to avoid reprocessing all buckets from 0 on every run. + _i7.Future lastProcessedTimestamp({_i1.BlockHash? 
at}) async { + final hashedKey = _lastProcessedTimestamp.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _lastProcessedTimestamp.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Items to be executed, indexed by the block number that they should be executed on. + _i7.Future> agenda(_i3.BlockNumberOrTimestamp key1, {_i1.BlockHash? at}) async { + final hashedKey = _agenda.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _agenda.decodeValue(bytes); + } + return []; /* Default */ + } + + /// Retry configurations for items to be executed, indexed by task address. + _i7.Future<_i6.RetryConfig?> retries(_i5.Tuple2<_i3.BlockNumberOrTimestamp, int> key1, {_i1.BlockHash? at}) async { + final hashedKey = _retries.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _retries.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Lookup from a name to the block number and index of the task. + /// + /// For v3 -> v4 the previously unbounded identities are Blake2-256 hashed to form the v4 + /// identities. + _i7.Future<_i5.Tuple2<_i3.BlockNumberOrTimestamp, int>?> lookup(List key1, {_i1.BlockHash? at}) async { + final hashedKey = _lookup.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _lookup.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Items to be executed, indexed by the block number that they should be executed on. + _i7.Future>> multiAgenda(List<_i3.BlockNumberOrTimestamp> keys, {_i1.BlockHash? 
at}) async { + final hashedKeys = keys.map((key) => _agenda.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _agenda.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>); /* Default */ + } + + /// Retry configurations for items to be executed, indexed by task address. + _i7.Future> multiRetries( + List<_i5.Tuple2<_i3.BlockNumberOrTimestamp, int>> keys, { + _i1.BlockHash? at, + }) async { + final hashedKeys = keys.map((key) => _retries.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _retries.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Lookup from a name to the block number and index of the task. + /// + /// For v3 -> v4 the previously unbounded identities are Blake2-256 hashed to form the v4 + /// identities. + _i7.Future?>> multiLookup( + List> keys, { + _i1.BlockHash? at, + }) async { + final hashedKeys = keys.map((key) => _lookup.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _lookup.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Returns the storage key for `incompleteBlockSince`. + _i8.Uint8List incompleteBlockSinceKey() { + final hashedKey = _incompleteBlockSince.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `incompleteTimestampSince`. + _i8.Uint8List incompleteTimestampSinceKey() { + final hashedKey = _incompleteTimestampSince.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `lastProcessedTimestamp`. + _i8.Uint8List lastProcessedTimestampKey() { + final hashedKey = _lastProcessedTimestamp.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `agenda`. 
+ _i8.Uint8List agendaKey(_i3.BlockNumberOrTimestamp key1) { + final hashedKey = _agenda.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `retries`. + _i8.Uint8List retriesKey(_i5.Tuple2<_i3.BlockNumberOrTimestamp, int> key1) { + final hashedKey = _retries.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `lookup`. + _i8.Uint8List lookupKey(List key1) { + final hashedKey = _lookup.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `agenda`. + _i8.Uint8List agendaMapPrefix() { + final hashedKey = _agenda.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `retries`. + _i8.Uint8List retriesMapPrefix() { + final hashedKey = _retries.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `lookup`. + _i8.Uint8List lookupMapPrefix() { + final hashedKey = _lookup.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Anonymously schedule a task. + _i9.Scheduler schedule({ + required int when, + _i5.Tuple2<_i3.BlockNumberOrTimestamp, int>? maybePeriodic, + required int priority, + required _i9.RuntimeCall call, + }) { + return _i9.Scheduler(_i10.Schedule(when: when, maybePeriodic: maybePeriodic, priority: priority, call: call)); + } + + /// Cancel an anonymously scheduled task. + _i9.Scheduler cancel({required _i3.BlockNumberOrTimestamp when, required int index}) { + return _i9.Scheduler(_i10.Cancel(when: when, index: index)); + } + + /// Schedule a named task. + _i9.Scheduler scheduleNamed({ + required List id, + required int when, + _i5.Tuple2<_i3.BlockNumberOrTimestamp, int>? maybePeriodic, + required int priority, + required _i9.RuntimeCall call, + }) { + return _i9.Scheduler( + _i10.ScheduleNamed(id: id, when: when, maybePeriodic: maybePeriodic, priority: priority, call: call), + ); + } + + /// Cancel a named scheduled task. 
+ _i9.Scheduler cancelNamed({required List id}) { + return _i9.Scheduler(_i10.CancelNamed(id: id)); + } + + /// Anonymously schedule a task after a delay. + _i9.Scheduler scheduleAfter({ + required _i3.BlockNumberOrTimestamp after, + _i5.Tuple2<_i3.BlockNumberOrTimestamp, int>? maybePeriodic, + required int priority, + required _i9.RuntimeCall call, + }) { + return _i9.Scheduler( + _i10.ScheduleAfter(after: after, maybePeriodic: maybePeriodic, priority: priority, call: call), + ); + } + + /// Schedule a named task after a delay. + _i9.Scheduler scheduleNamedAfter({ + required List id, + required _i3.BlockNumberOrTimestamp after, + _i5.Tuple2<_i3.BlockNumberOrTimestamp, int>? maybePeriodic, + required int priority, + required _i9.RuntimeCall call, + }) { + return _i9.Scheduler( + _i10.ScheduleNamedAfter(id: id, after: after, maybePeriodic: maybePeriodic, priority: priority, call: call), + ); + } + + /// Set a retry configuration for a task so that, in case its scheduled run fails, it will + /// be retried after `period` blocks, for a total amount of `retries` retries or until it + /// succeeds. + /// + /// Tasks which need to be scheduled for a retry are still subject to weight metering and + /// agenda space, same as a regular task. If a periodic task fails, it will be scheduled + /// normally while the task is retrying. + /// + /// Tasks scheduled as a result of a retry for a periodic task are unnamed, non-periodic + /// clones of the original task. Their retry configuration will be derived from the + /// original task's configuration, but will have a lower value for `remaining` than the + /// original `total_retries`. 
+ _i9.Scheduler setRetry({ + required _i5.Tuple2<_i3.BlockNumberOrTimestamp, int> task, + required int retries, + required _i3.BlockNumberOrTimestamp period, + }) { + return _i9.Scheduler(_i10.SetRetry(task: task, retries: retries, period: period)); + } + + /// Set a retry configuration for a named task so that, in case its scheduled run fails, it + /// will be retried after `period` blocks, for a total amount of `retries` retries or until + /// it succeeds. + /// + /// Tasks which need to be scheduled for a retry are still subject to weight metering and + /// agenda space, same as a regular task. If a periodic task fails, it will be scheduled + /// normally while the task is retrying. + /// + /// Tasks scheduled as a result of a retry for a periodic task are unnamed, non-periodic + /// clones of the original task. Their retry configuration will be derived from the + /// original task's configuration, but will have a lower value for `remaining` than the + /// original `total_retries`. + _i9.Scheduler setRetryNamed({ + required List id, + required int retries, + required _i3.BlockNumberOrTimestamp period, + }) { + return _i9.Scheduler(_i10.SetRetryNamed(id: id, retries: retries, period: period)); + } + + /// Removes the retry configuration of a task. + _i9.Scheduler cancelRetry({required _i5.Tuple2<_i3.BlockNumberOrTimestamp, int> task}) { + return _i9.Scheduler(_i10.CancelRetry(task: task)); + } + + /// Cancel the retry configuration of a named task. + _i9.Scheduler cancelRetryNamed({required List id}) { + return _i9.Scheduler(_i10.CancelRetryNamed(id: id)); + } +} + +class Constants { + Constants(); + + /// The maximum weight that may be scheduled per block for any dispatchables. + final _i11.Weight maximumWeight = _i11.Weight( + refTime: BigInt.from(4800000000000), + proofSize: BigInt.parse('14757395258967641292', radix: 10), + ); + + /// The maximum number of scheduled calls in the queue for a single block. 
+ /// + /// NOTE: + /// + Dependent pallets' benchmarks might require a higher limit for the setting. Set a + /// higher limit under `runtime-benchmarks` feature. + final int maxScheduledPerBlock = 50; + + /// Precision of the timestamp buckets. + /// + /// Timestamp based dispatches are rounded to the nearest bucket of this precision. + final BigInt timestampBucketSize = BigInt.from(24000); +} diff --git a/quantus_sdk/lib/generated/planck/pallets/sudo.dart b/quantus_sdk/lib/generated/planck/pallets/sudo.dart new file mode 100644 index 00000000..b9cadac9 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/sudo.dart @@ -0,0 +1,78 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i3; +import 'dart:typed_data' as _i4; + +import 'package:polkadart/polkadart.dart' as _i1; + +import '../types/pallet_sudo/pallet/call.dart' as _i6; +import '../types/quantus_runtime/runtime_call.dart' as _i5; +import '../types/sp_core/crypto/account_id32.dart' as _i2; +import '../types/sp_runtime/multiaddress/multi_address.dart' as _i8; +import '../types/sp_weights/weight_v2/weight.dart' as _i7; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageValue<_i2.AccountId32> _key = const _i1.StorageValue<_i2.AccountId32>( + prefix: 'Sudo', + storage: 'Key', + valueCodec: _i2.AccountId32Codec(), + ); + + /// The `AccountId` of the sudo key. + _i3.Future<_i2.AccountId32?> key({_i1.BlockHash? at}) async { + final hashedKey = _key.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _key.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Returns the storage key for `key`. + _i4.Uint8List keyKey() { + final hashedKey = _key.hashedKey(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Authenticates the sudo key and dispatches a function call with `Root` origin. 
+ _i5.Sudo sudo({required _i5.RuntimeCall call}) { + return _i5.Sudo(_i6.Sudo(call: call)); + } + + /// Authenticates the sudo key and dispatches a function call with `Root` origin. + /// This function does not check the weight of the call, and instead allows the + /// Sudo user to specify the weight of the call. + /// + /// The dispatch origin for this call must be _Signed_. + _i5.Sudo sudoUncheckedWeight({required _i5.RuntimeCall call, required _i7.Weight weight}) { + return _i5.Sudo(_i6.SudoUncheckedWeight(call: call, weight: weight)); + } + + /// Authenticates the current sudo key and sets the given AccountId (`new`) as the new sudo + /// key. + _i5.Sudo setKey({required _i8.MultiAddress new_}) { + return _i5.Sudo(_i6.SetKey(new_: new_)); + } + + /// Authenticates the sudo key and dispatches a function call with `Signed` origin from + /// a given account. + /// + /// The dispatch origin for this call must be _Signed_. + _i5.Sudo sudoAs({required _i8.MultiAddress who, required _i5.RuntimeCall call}) { + return _i5.Sudo(_i6.SudoAs(who: who, call: call)); + } + + /// Permanently removes the sudo key. 
+ /// + /// **This cannot be un-done.** + _i5.Sudo removeKey() { + return _i5.Sudo(_i6.RemoveKey()); + } +} diff --git a/quantus_sdk/lib/generated/planck/pallets/system.dart b/quantus_sdk/lib/generated/planck/pallets/system.dart new file mode 100644 index 00000000..6172599d --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/system.dart @@ -0,0 +1,766 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i14; +import 'dart:typed_data' as _i16; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i4; + +import '../types/frame_support/dispatch/per_dispatch_class_1.dart' as _i5; +import '../types/frame_support/dispatch/per_dispatch_class_2.dart' as _i20; +import '../types/frame_support/dispatch/per_dispatch_class_3.dart' as _i23; +import '../types/frame_system/account_info.dart' as _i3; +import '../types/frame_system/code_upgrade_authorization.dart' as _i12; +import '../types/frame_system/event_record.dart' as _i8; +import '../types/frame_system/last_runtime_upgrade_info.dart' as _i10; +import '../types/frame_system/limits/block_length.dart' as _i22; +import '../types/frame_system/limits/block_weights.dart' as _i19; +import '../types/frame_system/limits/weights_per_class.dart' as _i21; +import '../types/frame_system/pallet/call.dart' as _i18; +import '../types/frame_system/phase.dart' as _i11; +import '../types/pallet_balances/types/account_data.dart' as _i15; +import '../types/primitive_types/h256.dart' as _i6; +import '../types/quantus_runtime/runtime_call.dart' as _i17; +import '../types/sp_core/crypto/account_id32.dart' as _i2; +import '../types/sp_runtime/generic/digest/digest.dart' as _i7; +import '../types/sp_version/runtime_version.dart' as _i25; +import '../types/sp_weights/runtime_db_weight.dart' as _i24; +import '../types/sp_weights/weight_v2/weight.dart' as _i13; +import '../types/tuples.dart' as _i9; + +class Queries { + const Queries(this.__api); + + final 
_i1.StateApi __api; + + final _i1.StorageMap<_i2.AccountId32, _i3.AccountInfo> _account = + const _i1.StorageMap<_i2.AccountId32, _i3.AccountInfo>( + prefix: 'System', + storage: 'Account', + valueCodec: _i3.AccountInfo.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i2.AccountId32Codec()), + ); + + final _i1.StorageValue _extrinsicCount = const _i1.StorageValue( + prefix: 'System', + storage: 'ExtrinsicCount', + valueCodec: _i4.U32Codec.codec, + ); + + final _i1.StorageValue _inherentsApplied = const _i1.StorageValue( + prefix: 'System', + storage: 'InherentsApplied', + valueCodec: _i4.BoolCodec.codec, + ); + + final _i1.StorageValue<_i5.PerDispatchClass> _blockWeight = const _i1.StorageValue<_i5.PerDispatchClass>( + prefix: 'System', + storage: 'BlockWeight', + valueCodec: _i5.PerDispatchClass.codec, + ); + + final _i1.StorageValue _allExtrinsicsLen = const _i1.StorageValue( + prefix: 'System', + storage: 'AllExtrinsicsLen', + valueCodec: _i4.U32Codec.codec, + ); + + final _i1.StorageMap _blockHash = const _i1.StorageMap( + prefix: 'System', + storage: 'BlockHash', + valueCodec: _i6.H256Codec(), + hasher: _i1.StorageHasher.twoxx64Concat(_i4.U32Codec.codec), + ); + + final _i1.StorageMap> _extrinsicData = const _i1.StorageMap>( + prefix: 'System', + storage: 'ExtrinsicData', + valueCodec: _i4.U8SequenceCodec.codec, + hasher: _i1.StorageHasher.twoxx64Concat(_i4.U32Codec.codec), + ); + + final _i1.StorageValue _number = const _i1.StorageValue( + prefix: 'System', + storage: 'Number', + valueCodec: _i4.U32Codec.codec, + ); + + final _i1.StorageValue<_i6.H256> _parentHash = const _i1.StorageValue<_i6.H256>( + prefix: 'System', + storage: 'ParentHash', + valueCodec: _i6.H256Codec(), + ); + + final _i1.StorageValue<_i7.Digest> _digest = const _i1.StorageValue<_i7.Digest>( + prefix: 'System', + storage: 'Digest', + valueCodec: _i7.Digest.codec, + ); + + final _i1.StorageValue> _events = const _i1.StorageValue>( + prefix: 'System', + storage: 'Events', + valueCodec: 
_i4.SequenceCodec<_i8.EventRecord>(_i8.EventRecord.codec), + ); + + final _i1.StorageValue _eventCount = const _i1.StorageValue( + prefix: 'System', + storage: 'EventCount', + valueCodec: _i4.U32Codec.codec, + ); + + final _i1.StorageMap<_i6.H256, List<_i9.Tuple2>> _eventTopics = + const _i1.StorageMap<_i6.H256, List<_i9.Tuple2>>( + prefix: 'System', + storage: 'EventTopics', + valueCodec: _i4.SequenceCodec<_i9.Tuple2>( + _i9.Tuple2Codec(_i4.U32Codec.codec, _i4.U32Codec.codec), + ), + hasher: _i1.StorageHasher.blake2b128Concat(_i6.H256Codec()), + ); + + final _i1.StorageValue<_i10.LastRuntimeUpgradeInfo> _lastRuntimeUpgrade = + const _i1.StorageValue<_i10.LastRuntimeUpgradeInfo>( + prefix: 'System', + storage: 'LastRuntimeUpgrade', + valueCodec: _i10.LastRuntimeUpgradeInfo.codec, + ); + + final _i1.StorageValue _upgradedToU32RefCount = const _i1.StorageValue( + prefix: 'System', + storage: 'UpgradedToU32RefCount', + valueCodec: _i4.BoolCodec.codec, + ); + + final _i1.StorageValue _upgradedToTripleRefCount = const _i1.StorageValue( + prefix: 'System', + storage: 'UpgradedToTripleRefCount', + valueCodec: _i4.BoolCodec.codec, + ); + + final _i1.StorageValue<_i11.Phase> _executionPhase = const _i1.StorageValue<_i11.Phase>( + prefix: 'System', + storage: 'ExecutionPhase', + valueCodec: _i11.Phase.codec, + ); + + final _i1.StorageValue<_i12.CodeUpgradeAuthorization> _authorizedUpgrade = + const _i1.StorageValue<_i12.CodeUpgradeAuthorization>( + prefix: 'System', + storage: 'AuthorizedUpgrade', + valueCodec: _i12.CodeUpgradeAuthorization.codec, + ); + + final _i1.StorageValue<_i13.Weight> _extrinsicWeightReclaimed = const _i1.StorageValue<_i13.Weight>( + prefix: 'System', + storage: 'ExtrinsicWeightReclaimed', + valueCodec: _i13.Weight.codec, + ); + + /// The full account information for a particular account ID. + _i14.Future<_i3.AccountInfo> account(_i2.AccountId32 key1, {_i1.BlockHash? 
at}) async { + final hashedKey = _account.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _account.decodeValue(bytes); + } + return _i3.AccountInfo( + nonce: 0, + consumers: 0, + providers: 0, + sufficients: 0, + data: _i15.AccountData( + free: BigInt.zero, + reserved: BigInt.zero, + frozen: BigInt.zero, + flags: BigInt.parse('170141183460469231731687303715884105728', radix: 10), + ), + ); /* Default */ + } + + /// Total extrinsics count for the current block. + _i14.Future extrinsicCount({_i1.BlockHash? at}) async { + final hashedKey = _extrinsicCount.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _extrinsicCount.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Whether all inherents have been applied. + _i14.Future inherentsApplied({_i1.BlockHash? at}) async { + final hashedKey = _inherentsApplied.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _inherentsApplied.decodeValue(bytes); + } + return false; /* Default */ + } + + /// The current weight for the block. + _i14.Future<_i5.PerDispatchClass> blockWeight({_i1.BlockHash? at}) async { + final hashedKey = _blockWeight.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _blockWeight.decodeValue(bytes); + } + return _i5.PerDispatchClass( + normal: _i13.Weight(refTime: BigInt.zero, proofSize: BigInt.zero), + operational: _i13.Weight(refTime: BigInt.zero, proofSize: BigInt.zero), + mandatory: _i13.Weight(refTime: BigInt.zero, proofSize: BigInt.zero), + ); /* Default */ + } + + /// Total length (in bytes) for all extrinsics put together, for the current block. + _i14.Future allExtrinsicsLen({_i1.BlockHash? 
at}) async { + final hashedKey = _allExtrinsicsLen.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _allExtrinsicsLen.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Map of block numbers to block hashes. + _i14.Future<_i6.H256> blockHash(int key1, {_i1.BlockHash? at}) async { + final hashedKey = _blockHash.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _blockHash.decodeValue(bytes); + } + return List.filled(32, 0, growable: false); /* Default */ + } + + /// Extrinsics data for the current block (maps an extrinsic's index to its data). + _i14.Future> extrinsicData(int key1, {_i1.BlockHash? at}) async { + final hashedKey = _extrinsicData.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _extrinsicData.decodeValue(bytes); + } + return List.filled(0, 0, growable: true); /* Default */ + } + + /// The current block number being processed. Set by `execute_block`. + _i14.Future number({_i1.BlockHash? at}) async { + final hashedKey = _number.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _number.decodeValue(bytes); + } + return 0; /* Default */ + } + + /// Hash of the previous block. + _i14.Future<_i6.H256> parentHash({_i1.BlockHash? at}) async { + final hashedKey = _parentHash.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _parentHash.decodeValue(bytes); + } + return List.filled(32, 0, growable: false); /* Default */ + } + + /// Digest of the current block, also part of the block header. + _i14.Future<_i7.Digest> digest({_i1.BlockHash? 
at}) async { + final hashedKey = _digest.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _digest.decodeValue(bytes); + } + return _i7.Digest(logs: []); /* Default */ + } + + /// Events deposited for the current block. + /// + /// NOTE: The item is unbound and should therefore never be read on chain. + /// It could otherwise inflate the PoV size of a block. + /// + /// Events have a large in-memory size. Box the events to not go out-of-memory + /// just in case someone still reads them from within the runtime. + _i14.Future> events({_i1.BlockHash? at}) async { + final hashedKey = _events.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _events.decodeValue(bytes); + } + return []; /* Default */ + } + + /// The number of events in the `Events` list. + _i14.Future eventCount({_i1.BlockHash? at}) async { + final hashedKey = _eventCount.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _eventCount.decodeValue(bytes); + } + return 0; /* Default */ + } + + /// Mapping between a topic (represented by T::Hash) and a vector of indexes + /// of events in the `>` list. + /// + /// All topic vectors have deterministic storage locations depending on the topic. This + /// allows light-clients to leverage the changes trie storage tracking mechanism and + /// in case of changes fetch the list of events of interest. + /// + /// The value has the type `(BlockNumberFor, EventIndex)` because if we used only just + /// the `EventIndex` then in case if the topic has the same contents on the next block + /// no notification will be triggered thus the event might be lost. + _i14.Future>> eventTopics(_i6.H256 key1, {_i1.BlockHash? 
at}) async { + final hashedKey = _eventTopics.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _eventTopics.decodeValue(bytes); + } + return []; /* Default */ + } + + /// Stores the `spec_version` and `spec_name` of when the last runtime upgrade happened. + _i14.Future<_i10.LastRuntimeUpgradeInfo?> lastRuntimeUpgrade({_i1.BlockHash? at}) async { + final hashedKey = _lastRuntimeUpgrade.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _lastRuntimeUpgrade.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// True if we have upgraded so that `type RefCount` is `u32`. False (default) if not. + _i14.Future upgradedToU32RefCount({_i1.BlockHash? at}) async { + final hashedKey = _upgradedToU32RefCount.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _upgradedToU32RefCount.decodeValue(bytes); + } + return false; /* Default */ + } + + /// True if we have upgraded so that AccountInfo contains three types of `RefCount`. False + /// (default) if not. + _i14.Future upgradedToTripleRefCount({_i1.BlockHash? at}) async { + final hashedKey = _upgradedToTripleRefCount.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _upgradedToTripleRefCount.decodeValue(bytes); + } + return false; /* Default */ + } + + /// The execution phase of the block. + _i14.Future<_i11.Phase?> executionPhase({_i1.BlockHash? at}) async { + final hashedKey = _executionPhase.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _executionPhase.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// `Some` if a code upgrade has been authorized. + _i14.Future<_i12.CodeUpgradeAuthorization?> authorizedUpgrade({_i1.BlockHash? 
at}) async { + final hashedKey = _authorizedUpgrade.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _authorizedUpgrade.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The weight reclaimed for the extrinsic. + /// + /// This information is available until the end of the extrinsic execution. + /// More precisely this information is removed in `note_applied_extrinsic`. + /// + /// Logic doing some post dispatch weight reduction must update this storage to avoid duplicate + /// reduction. + _i14.Future<_i13.Weight> extrinsicWeightReclaimed({_i1.BlockHash? at}) async { + final hashedKey = _extrinsicWeightReclaimed.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _extrinsicWeightReclaimed.decodeValue(bytes); + } + return _i13.Weight(refTime: BigInt.zero, proofSize: BigInt.zero); /* Default */ + } + + /// The full account information for a particular account ID. + _i14.Future> multiAccount(List<_i2.AccountId32> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _account.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _account.decodeValue(v.key)).toList(); + } + return (keys + .map( + (key) => _i3.AccountInfo( + nonce: 0, + consumers: 0, + providers: 0, + sufficients: 0, + data: _i15.AccountData( + free: BigInt.zero, + reserved: BigInt.zero, + frozen: BigInt.zero, + flags: BigInt.parse('170141183460469231731687303715884105728', radix: 10), + ), + ), + ) + .toList() + as List<_i3.AccountInfo>); /* Default */ + } + + /// Map of block numbers to block hashes. + _i14.Future> multiBlockHash(List keys, {_i1.BlockHash? 
at}) async { + final hashedKeys = keys.map((key) => _blockHash.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _blockHash.decodeValue(v.key)).toList(); + } + return (keys.map((key) => List.filled(32, 0, growable: false)).toList() as List<_i6.H256>); /* Default */ + } + + /// Extrinsics data for the current block (maps an extrinsic's index to its data). + _i14.Future>> multiExtrinsicData(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _extrinsicData.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _extrinsicData.decodeValue(v.key)).toList(); + } + return (keys.map((key) => List.filled(0, 0, growable: true)).toList() as List>); /* Default */ + } + + /// Mapping between a topic (represented by T::Hash) and a vector of indexes + /// of events in the `>` list. + /// + /// All topic vectors have deterministic storage locations depending on the topic. This + /// allows light-clients to leverage the changes trie storage tracking mechanism and + /// in case of changes fetch the list of events of interest. + /// + /// The value has the type `(BlockNumberFor, EventIndex)` because if we used only just + /// the `EventIndex` then in case if the topic has the same contents on the next block + /// no notification will be triggered thus the event might be lost. + _i14.Future>>> multiEventTopics(List<_i6.H256> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _eventTopics.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _eventTopics.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>>); /* Default */ + } + + /// Returns the storage key for `account`. 
+ _i16.Uint8List accountKey(_i2.AccountId32 key1) { + final hashedKey = _account.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `extrinsicCount`. + _i16.Uint8List extrinsicCountKey() { + final hashedKey = _extrinsicCount.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `inherentsApplied`. + _i16.Uint8List inherentsAppliedKey() { + final hashedKey = _inherentsApplied.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `blockWeight`. + _i16.Uint8List blockWeightKey() { + final hashedKey = _blockWeight.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `allExtrinsicsLen`. + _i16.Uint8List allExtrinsicsLenKey() { + final hashedKey = _allExtrinsicsLen.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `blockHash`. + _i16.Uint8List blockHashKey(int key1) { + final hashedKey = _blockHash.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `extrinsicData`. + _i16.Uint8List extrinsicDataKey(int key1) { + final hashedKey = _extrinsicData.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `number`. + _i16.Uint8List numberKey() { + final hashedKey = _number.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `parentHash`. + _i16.Uint8List parentHashKey() { + final hashedKey = _parentHash.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `digest`. + _i16.Uint8List digestKey() { + final hashedKey = _digest.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `events`. + _i16.Uint8List eventsKey() { + final hashedKey = _events.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `eventCount`. + _i16.Uint8List eventCountKey() { + final hashedKey = _eventCount.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `eventTopics`. 
+ _i16.Uint8List eventTopicsKey(_i6.H256 key1) { + final hashedKey = _eventTopics.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `lastRuntimeUpgrade`. + _i16.Uint8List lastRuntimeUpgradeKey() { + final hashedKey = _lastRuntimeUpgrade.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `upgradedToU32RefCount`. + _i16.Uint8List upgradedToU32RefCountKey() { + final hashedKey = _upgradedToU32RefCount.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `upgradedToTripleRefCount`. + _i16.Uint8List upgradedToTripleRefCountKey() { + final hashedKey = _upgradedToTripleRefCount.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `executionPhase`. + _i16.Uint8List executionPhaseKey() { + final hashedKey = _executionPhase.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `authorizedUpgrade`. + _i16.Uint8List authorizedUpgradeKey() { + final hashedKey = _authorizedUpgrade.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `extrinsicWeightReclaimed`. + _i16.Uint8List extrinsicWeightReclaimedKey() { + final hashedKey = _extrinsicWeightReclaimed.hashedKey(); + return hashedKey; + } + + /// Returns the storage map key prefix for `account`. + _i16.Uint8List accountMapPrefix() { + final hashedKey = _account.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `blockHash`. + _i16.Uint8List blockHashMapPrefix() { + final hashedKey = _blockHash.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `extrinsicData`. + _i16.Uint8List extrinsicDataMapPrefix() { + final hashedKey = _extrinsicData.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `eventTopics`. + _i16.Uint8List eventTopicsMapPrefix() { + final hashedKey = _eventTopics.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Make some on-chain remark. 
+ /// + /// Can be executed by every `origin`. + _i17.System remark({required List remark}) { + return _i17.System(_i18.Remark(remark: remark)); + } + + /// Set the number of pages in the WebAssembly environment's heap. + _i17.System setHeapPages({required BigInt pages}) { + return _i17.System(_i18.SetHeapPages(pages: pages)); + } + + /// Set the new runtime code. + _i17.System setCode({required List code}) { + return _i17.System(_i18.SetCode(code: code)); + } + + /// Set the new runtime code without doing any checks of the given `code`. + /// + /// Note that runtime upgrades will not run if this is called with a not-increasing spec + /// version! + _i17.System setCodeWithoutChecks({required List code}) { + return _i17.System(_i18.SetCodeWithoutChecks(code: code)); + } + + /// Set some items of storage. + _i17.System setStorage({required List<_i9.Tuple2, List>> items}) { + return _i17.System(_i18.SetStorage(items: items)); + } + + /// Kill some items from storage. + _i17.System killStorage({required List> keys}) { + return _i17.System(_i18.KillStorage(keys: keys)); + } + + /// Kill all storage items with a key that starts with the given prefix. + /// + /// **NOTE:** We rely on the Root origin to provide us the number of subkeys under + /// the prefix we are removing to accurately calculate the weight of this function. + _i17.System killPrefix({required List prefix, required int subkeys}) { + return _i17.System(_i18.KillPrefix(prefix: prefix, subkeys: subkeys)); + } + + /// Make some on-chain remark and emit event. + _i17.System remarkWithEvent({required List remark}) { + return _i17.System(_i18.RemarkWithEvent(remark: remark)); + } + + /// Authorize an upgrade to a given `code_hash` for the runtime. The runtime can be supplied + /// later. + /// + /// This call requires Root origin. 
+ _i17.System authorizeUpgrade({required _i6.H256 codeHash}) { + return _i17.System(_i18.AuthorizeUpgrade(codeHash: codeHash)); + } + + /// Authorize an upgrade to a given `code_hash` for the runtime. The runtime can be supplied + /// later. + /// + /// WARNING: This authorizes an upgrade that will take place without any safety checks, for + /// example that the spec name remains the same and that the version number increases. Not + /// recommended for normal use. Use `authorize_upgrade` instead. + /// + /// This call requires Root origin. + _i17.System authorizeUpgradeWithoutChecks({required _i6.H256 codeHash}) { + return _i17.System(_i18.AuthorizeUpgradeWithoutChecks(codeHash: codeHash)); + } + + /// Provide the preimage (runtime binary) `code` for an upgrade that has been authorized. + /// + /// If the authorization required a version check, this call will ensure the spec name + /// remains unchanged and that the spec version has increased. + /// + /// Depending on the runtime's `OnSetCode` configuration, this function may directly apply + /// the new `code` in the same block or attempt to schedule the upgrade. + /// + /// All origins are allowed. + _i17.System applyAuthorizedUpgrade({required List code}) { + return _i17.System(_i18.ApplyAuthorizedUpgrade(code: code)); + } +} + +class Constants { + Constants(); + + /// Block & extrinsics weights: base values and limits. 
+ final _i19.BlockWeights blockWeights = _i19.BlockWeights( + baseBlock: _i13.Weight(refTime: BigInt.from(431614000), proofSize: BigInt.zero), + maxBlock: _i13.Weight( + refTime: BigInt.from(6000000000000), + proofSize: BigInt.parse('18446744073709551615', radix: 10), + ), + perClass: _i20.PerDispatchClass( + normal: _i21.WeightsPerClass( + baseExtrinsic: _i13.Weight(refTime: BigInt.from(108157000), proofSize: BigInt.zero), + maxExtrinsic: _i13.Weight( + refTime: BigInt.from(3899891843000), + proofSize: BigInt.parse('11990383647911208550', radix: 10), + ), + maxTotal: _i13.Weight( + refTime: BigInt.from(4500000000000), + proofSize: BigInt.parse('13835058055282163711', radix: 10), + ), + reserved: _i13.Weight(refTime: BigInt.zero, proofSize: BigInt.zero), + ), + operational: _i21.WeightsPerClass( + baseExtrinsic: _i13.Weight(refTime: BigInt.from(108157000), proofSize: BigInt.zero), + maxExtrinsic: _i13.Weight( + refTime: BigInt.from(5399891843000), + proofSize: BigInt.parse('16602069666338596454', radix: 10), + ), + maxTotal: _i13.Weight( + refTime: BigInt.from(6000000000000), + proofSize: BigInt.parse('18446744073709551615', radix: 10), + ), + reserved: _i13.Weight( + refTime: BigInt.from(1500000000000), + proofSize: BigInt.parse('4611686018427387904', radix: 10), + ), + ), + mandatory: _i21.WeightsPerClass( + baseExtrinsic: _i13.Weight(refTime: BigInt.from(108157000), proofSize: BigInt.zero), + maxExtrinsic: null, + maxTotal: null, + reserved: null, + ), + ), + ); + + /// The maximum length of a block (in bytes). + final _i22.BlockLength blockLength = const _i22.BlockLength( + max: _i23.PerDispatchClass(normal: 3932160, operational: 5242880, mandatory: 5242880), + ); + + /// Maximum number of block number to block hash mappings to keep (oldest pruned first). + final int blockHashCount = 4096; + + /// The weight of runtime database operations the runtime can invoke. 
+ final _i24.RuntimeDbWeight dbWeight = _i24.RuntimeDbWeight( + read: BigInt.from(25000000), + write: BigInt.from(100000000), + ); + + /// Get the chain's in-code version. + final _i25.RuntimeVersion version = const _i25.RuntimeVersion( + specName: 'quantus-runtime', + implName: 'quantus-runtime', + authoringVersion: 1, + specVersion: 117, + implVersion: 1, + apis: [ + _i9.Tuple2, int>([223, 106, 203, 104, 153, 7, 96, 155], 5), + _i9.Tuple2, int>([55, 227, 151, 252, 124, 145, 245, 228], 2), + _i9.Tuple2, int>([64, 254, 58, 212, 1, 248, 149, 154], 6), + _i9.Tuple2, int>([210, 188, 152, 151, 238, 208, 143, 21], 3), + _i9.Tuple2, int>([247, 139, 39, 139, 229, 63, 69, 76], 2), + _i9.Tuple2, int>([171, 60, 5, 114, 41, 31, 235, 139], 1), + _i9.Tuple2, int>([19, 40, 169, 252, 46, 48, 6, 19], 1), + _i9.Tuple2, int>([188, 157, 137, 144, 79, 91, 146, 63], 1), + _i9.Tuple2, int>([55, 200, 187, 19, 80, 169, 162, 168], 4), + _i9.Tuple2, int>([243, 255, 20, 213, 171, 82, 112, 89], 3), + _i9.Tuple2, int>([251, 197, 119, 185, 215, 71, 239, 214], 1), + ], + transactionVersion: 2, + systemVersion: 1, + ); + + /// The designated SS58 prefix of this chain. + /// + /// This replaces the "ss58Format" property declared in the chain spec. Reason is + /// that the runtime should know about the prefix in order to make use of it as + /// an identifier of the chain. 
+ final int sS58Prefix = 189; +} diff --git a/quantus_sdk/lib/generated/planck/pallets/tech_collective.dart b/quantus_sdk/lib/generated/planck/pallets/tech_collective.dart new file mode 100644 index 00000000..61efa344 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/tech_collective.dart @@ -0,0 +1,315 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i6; +import 'dart:typed_data' as _i7; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i2; + +import '../types/pallet_ranked_collective/member_record.dart' as _i4; +import '../types/pallet_ranked_collective/pallet/call.dart' as _i10; +import '../types/pallet_ranked_collective/vote_record.dart' as _i5; +import '../types/quantus_runtime/runtime_call.dart' as _i8; +import '../types/sp_core/crypto/account_id32.dart' as _i3; +import '../types/sp_runtime/multiaddress/multi_address.dart' as _i9; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageMap _memberCount = const _i1.StorageMap( + prefix: 'TechCollective', + storage: 'MemberCount', + valueCodec: _i2.U32Codec.codec, + hasher: _i1.StorageHasher.twoxx64Concat(_i2.U16Codec.codec), + ); + + final _i1.StorageMap<_i3.AccountId32, _i4.MemberRecord> _members = + const _i1.StorageMap<_i3.AccountId32, _i4.MemberRecord>( + prefix: 'TechCollective', + storage: 'Members', + valueCodec: _i4.MemberRecord.codec, + hasher: _i1.StorageHasher.twoxx64Concat(_i3.AccountId32Codec()), + ); + + final _i1.StorageDoubleMap _idToIndex = + const _i1.StorageDoubleMap( + prefix: 'TechCollective', + storage: 'IdToIndex', + valueCodec: _i2.U32Codec.codec, + hasher1: _i1.StorageHasher.twoxx64Concat(_i2.U16Codec.codec), + hasher2: _i1.StorageHasher.twoxx64Concat(_i3.AccountId32Codec()), + ); + + final _i1.StorageDoubleMap _indexToId = + const _i1.StorageDoubleMap( + prefix: 'TechCollective', + storage: 'IndexToId', + valueCodec: 
_i3.AccountId32Codec(), + hasher1: _i1.StorageHasher.twoxx64Concat(_i2.U16Codec.codec), + hasher2: _i1.StorageHasher.twoxx64Concat(_i2.U32Codec.codec), + ); + + final _i1.StorageDoubleMap _voting = + const _i1.StorageDoubleMap( + prefix: 'TechCollective', + storage: 'Voting', + valueCodec: _i5.VoteRecord.codec, + hasher1: _i1.StorageHasher.blake2b128Concat(_i2.U32Codec.codec), + hasher2: _i1.StorageHasher.twoxx64Concat(_i3.AccountId32Codec()), + ); + + final _i1.StorageMap> _votingCleanup = const _i1.StorageMap>( + prefix: 'TechCollective', + storage: 'VotingCleanup', + valueCodec: _i2.U8SequenceCodec.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i2.U32Codec.codec), + ); + + /// The number of members in the collective who have at least the rank according to the index + /// of the vec. + _i6.Future memberCount(int key1, {_i1.BlockHash? at}) async { + final hashedKey = _memberCount.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _memberCount.decodeValue(bytes); + } + return 0; /* Default */ + } + + /// The current members of the collective. + _i6.Future<_i4.MemberRecord?> members(_i3.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _members.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _members.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The index of each ranks's member into the group of members who have at least that rank. + _i6.Future idToIndex(int key1, _i3.AccountId32 key2, {_i1.BlockHash? at}) async { + final hashedKey = _idToIndex.hashedKeyFor(key1, key2); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _idToIndex.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The members in the collective by index. 
All indices in the range `0..MemberCount` will + /// return `Some`, however a member's index is not guaranteed to remain unchanged over time. + _i6.Future<_i3.AccountId32?> indexToId(int key1, int key2, {_i1.BlockHash? at}) async { + final hashedKey = _indexToId.hashedKeyFor(key1, key2); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _indexToId.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Votes on a given proposal, if it is ongoing. + _i6.Future<_i5.VoteRecord?> voting(int key1, _i3.AccountId32 key2, {_i1.BlockHash? at}) async { + final hashedKey = _voting.hashedKeyFor(key1, key2); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _voting.decodeValue(bytes); + } + return null; /* Nullable */ + } + + _i6.Future?> votingCleanup(int key1, {_i1.BlockHash? at}) async { + final hashedKey = _votingCleanup.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _votingCleanup.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The number of members in the collective who have at least the rank according to the index + /// of the vec. + _i6.Future> multiMemberCount(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _memberCount.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _memberCount.decodeValue(v.key)).toList(); + } + return (keys.map((key) => 0).toList() as List); /* Default */ + } + + /// The current members of the collective. + _i6.Future> multiMembers(List<_i3.AccountId32> keys, {_i1.BlockHash? 
at}) async { + final hashedKeys = keys.map((key) => _members.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _members.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + _i6.Future?>> multiVotingCleanup(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _votingCleanup.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _votingCleanup.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Returns the storage key for `memberCount`. + _i7.Uint8List memberCountKey(int key1) { + final hashedKey = _memberCount.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `members`. + _i7.Uint8List membersKey(_i3.AccountId32 key1) { + final hashedKey = _members.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `idToIndex`. + _i7.Uint8List idToIndexKey(int key1, _i3.AccountId32 key2) { + final hashedKey = _idToIndex.hashedKeyFor(key1, key2); + return hashedKey; + } + + /// Returns the storage key for `indexToId`. + _i7.Uint8List indexToIdKey(int key1, int key2) { + final hashedKey = _indexToId.hashedKeyFor(key1, key2); + return hashedKey; + } + + /// Returns the storage key for `voting`. + _i7.Uint8List votingKey(int key1, _i3.AccountId32 key2) { + final hashedKey = _voting.hashedKeyFor(key1, key2); + return hashedKey; + } + + /// Returns the storage key for `votingCleanup`. + _i7.Uint8List votingCleanupKey(int key1) { + final hashedKey = _votingCleanup.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `memberCount`. + _i7.Uint8List memberCountMapPrefix() { + final hashedKey = _memberCount.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `members`. 
+ _i7.Uint8List membersMapPrefix() { + final hashedKey = _members.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `idToIndex`. + _i7.Uint8List idToIndexMapPrefix(int key1) { + final hashedKey = _idToIndex.mapPrefix(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `indexToId`. + _i7.Uint8List indexToIdMapPrefix(int key1) { + final hashedKey = _indexToId.mapPrefix(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `voting`. + _i7.Uint8List votingMapPrefix(int key1) { + final hashedKey = _voting.mapPrefix(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `votingCleanup`. + _i7.Uint8List votingCleanupMapPrefix() { + final hashedKey = _votingCleanup.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Introduce a new member. + /// + /// - `origin`: Must be the `AddOrigin`. + /// - `who`: Account of non-member which will become a member. + /// + /// Weight: `O(1)` + _i8.TechCollective addMember({required _i9.MultiAddress who}) { + return _i8.TechCollective(_i10.AddMember(who: who)); + } + + /// Increment the rank of an existing member by one. + /// + /// - `origin`: Must be the `PromoteOrigin`. + /// - `who`: Account of existing member. + /// + /// Weight: `O(1)` + _i8.TechCollective promoteMember({required _i9.MultiAddress who}) { + return _i8.TechCollective(_i10.PromoteMember(who: who)); + } + + /// Decrement the rank of an existing member by one. If the member is already at rank zero, + /// then they are removed entirely. + /// + /// - `origin`: Must be the `DemoteOrigin`. + /// - `who`: Account of existing member of rank greater than zero. + /// + /// Weight: `O(1)`, less if the member's index is highest in its rank. + _i8.TechCollective demoteMember({required _i9.MultiAddress who}) { + return _i8.TechCollective(_i10.DemoteMember(who: who)); + } + + /// Remove the member entirely. 
+ /// + /// - `origin`: Must be the `RemoveOrigin`. + /// - `who`: Account of existing member of rank greater than zero. + /// - `min_rank`: The rank of the member or greater. + /// + /// Weight: `O(min_rank)`. + _i8.TechCollective removeMember({required _i9.MultiAddress who, required int minRank}) { + return _i8.TechCollective(_i10.RemoveMember(who: who, minRank: minRank)); + } + + /// Add an aye or nay vote for the sender to the given proposal. + /// + /// - `origin`: Must be `Signed` by a member account. + /// - `poll`: Index of a poll which is ongoing. + /// - `aye`: `true` if the vote is to approve the proposal, `false` otherwise. + /// + /// Transaction fees are be waived if the member is voting on any particular proposal + /// for the first time and the call is successful. Subsequent vote changes will charge a + /// fee. + /// + /// Weight: `O(1)`, less if there was no previous vote on the poll by the member. + _i8.TechCollective vote({required int poll, required bool aye}) { + return _i8.TechCollective(_i10.Vote(poll: poll, aye: aye)); + } + + /// Remove votes from the given poll. It must have ended. + /// + /// - `origin`: Must be `Signed` by any account. + /// - `poll_index`: Index of a poll which is completed and for which votes continue to + /// exist. + /// - `max`: Maximum number of vote items from remove in this call. + /// + /// Transaction fees are waived if the operation is successful. + /// + /// Weight `O(max)` (less if there are fewer items to remove than `max`). + _i8.TechCollective cleanupPoll({required int pollIndex, required int max}) { + return _i8.TechCollective(_i10.CleanupPoll(pollIndex: pollIndex, max: max)); + } + + /// Exchanges a member with a new account and the same existing rank. + /// + /// - `origin`: Must be the `ExchangeOrigin`. + /// - `who`: Account of existing member of rank greater than zero to be exchanged. + /// - `new_who`: New Account of existing member of rank greater than zero to exchanged to. 
+ _i8.TechCollective exchangeMember({required _i9.MultiAddress who, required _i9.MultiAddress newWho}) { + return _i8.TechCollective(_i10.ExchangeMember(who: who, newWho: newWho)); + } +} diff --git a/quantus_sdk/lib/generated/planck/pallets/tech_referenda.dart b/quantus_sdk/lib/generated/planck/pallets/tech_referenda.dart new file mode 100644 index 00000000..25a942a9 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/tech_referenda.dart @@ -0,0 +1,368 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i6; +import 'dart:typed_data' as _i7; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i2; + +import '../types/frame_support/traits/preimages/bounded.dart' as _i10; +import '../types/frame_support/traits/schedule/dispatch_time.dart' as _i11; +import '../types/pallet_referenda/pallet/call_2.dart' as _i12; +import '../types/pallet_referenda/types/curve.dart' as _i14; +import '../types/pallet_referenda/types/referendum_info_2.dart' as _i3; +import '../types/pallet_referenda/types/track_details.dart' as _i13; +import '../types/primitive_types/h256.dart' as _i5; +import '../types/quantus_runtime/origin_caller.dart' as _i9; +import '../types/quantus_runtime/runtime_call.dart' as _i8; +import '../types/tuples.dart' as _i4; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageValue _referendumCount = const _i1.StorageValue( + prefix: 'TechReferenda', + storage: 'ReferendumCount', + valueCodec: _i2.U32Codec.codec, + ); + + final _i1.StorageMap _referendumInfoFor = const _i1.StorageMap( + prefix: 'TechReferenda', + storage: 'ReferendumInfoFor', + valueCodec: _i3.ReferendumInfo.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i2.U32Codec.codec), + ); + + final _i1.StorageMap>> _trackQueue = + const _i1.StorageMap>>( + prefix: 'TechReferenda', + storage: 'TrackQueue', + valueCodec: _i2.SequenceCodec<_i4.Tuple2>( + 
_i4.Tuple2Codec(_i2.U32Codec.codec, _i2.U32Codec.codec), + ), + hasher: _i1.StorageHasher.twoxx64Concat(_i2.U16Codec.codec), + ); + + final _i1.StorageMap _decidingCount = const _i1.StorageMap( + prefix: 'TechReferenda', + storage: 'DecidingCount', + valueCodec: _i2.U32Codec.codec, + hasher: _i1.StorageHasher.twoxx64Concat(_i2.U16Codec.codec), + ); + + final _i1.StorageMap _metadataOf = const _i1.StorageMap( + prefix: 'TechReferenda', + storage: 'MetadataOf', + valueCodec: _i5.H256Codec(), + hasher: _i1.StorageHasher.blake2b128Concat(_i2.U32Codec.codec), + ); + + /// The next free referendum index, aka the number of referenda started so far. + _i6.Future referendumCount({_i1.BlockHash? at}) async { + final hashedKey = _referendumCount.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _referendumCount.decodeValue(bytes); + } + return 0; /* Default */ + } + + /// Information concerning any given referendum. + _i6.Future<_i3.ReferendumInfo?> referendumInfoFor(int key1, {_i1.BlockHash? at}) async { + final hashedKey = _referendumInfoFor.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _referendumInfoFor.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The sorted list of referenda ready to be decided but not yet being decided, ordered by + /// conviction-weighted approvals. + /// + /// This should be empty if `DecidingCount` is less than `TrackInfo::max_deciding`. + _i6.Future>> trackQueue(int key1, {_i1.BlockHash? at}) async { + final hashedKey = _trackQueue.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _trackQueue.decodeValue(bytes); + } + return []; /* Default */ + } + + /// The number of referenda being decided currently. + _i6.Future decidingCount(int key1, {_i1.BlockHash? 
at}) async { + final hashedKey = _decidingCount.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _decidingCount.decodeValue(bytes); + } + return 0; /* Default */ + } + + /// The metadata is a general information concerning the referendum. + /// The `Hash` refers to the preimage of the `Preimages` provider which can be a JSON + /// dump or IPFS hash of a JSON file. + /// + /// Consider a garbage collection for a metadata of finished referendums to `unrequest` (remove) + /// large preimages. + _i6.Future<_i5.H256?> metadataOf(int key1, {_i1.BlockHash? at}) async { + final hashedKey = _metadataOf.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _metadataOf.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Information concerning any given referendum. + _i6.Future> multiReferendumInfoFor(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _referendumInfoFor.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _referendumInfoFor.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// The sorted list of referenda ready to be decided but not yet being decided, ordered by + /// conviction-weighted approvals. + /// + /// This should be empty if `DecidingCount` is less than `TrackInfo::max_deciding`. + _i6.Future>>> multiTrackQueue(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _trackQueue.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _trackQueue.decodeValue(v.key)).toList(); + } + return (keys.map((key) => []).toList() as List>>); /* Default */ + } + + /// The number of referenda being decided currently. 
+ _i6.Future> multiDecidingCount(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _decidingCount.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _decidingCount.decodeValue(v.key)).toList(); + } + return (keys.map((key) => 0).toList() as List); /* Default */ + } + + /// The metadata is a general information concerning the referendum. + /// The `Hash` refers to the preimage of the `Preimages` provider which can be a JSON + /// dump or IPFS hash of a JSON file. + /// + /// Consider a garbage collection for a metadata of finished referendums to `unrequest` (remove) + /// large preimages. + _i6.Future> multiMetadataOf(List keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _metadataOf.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _metadataOf.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Returns the storage key for `referendumCount`. + _i7.Uint8List referendumCountKey() { + final hashedKey = _referendumCount.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `referendumInfoFor`. + _i7.Uint8List referendumInfoForKey(int key1) { + final hashedKey = _referendumInfoFor.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `trackQueue`. + _i7.Uint8List trackQueueKey(int key1) { + final hashedKey = _trackQueue.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `decidingCount`. + _i7.Uint8List decidingCountKey(int key1) { + final hashedKey = _decidingCount.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `metadataOf`. 
+ _i7.Uint8List metadataOfKey(int key1) { + final hashedKey = _metadataOf.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `referendumInfoFor`. + _i7.Uint8List referendumInfoForMapPrefix() { + final hashedKey = _referendumInfoFor.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `trackQueue`. + _i7.Uint8List trackQueueMapPrefix() { + final hashedKey = _trackQueue.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `decidingCount`. + _i7.Uint8List decidingCountMapPrefix() { + final hashedKey = _decidingCount.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `metadataOf`. + _i7.Uint8List metadataOfMapPrefix() { + final hashedKey = _metadataOf.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Propose a referendum on a privileged action. + /// + /// - `origin`: must be `SubmitOrigin` and the account must have `SubmissionDeposit` funds + /// available. + /// - `proposal_origin`: The origin from which the proposal should be executed. + /// - `proposal`: The proposal. + /// - `enactment_moment`: The moment that the proposal should be enacted. + /// + /// Emits `Submitted`. + _i8.TechReferenda submit({ + required _i9.OriginCaller proposalOrigin, + required _i10.Bounded proposal, + required _i11.DispatchTime enactmentMoment, + }) { + return _i8.TechReferenda( + _i12.Submit(proposalOrigin: proposalOrigin, proposal: proposal, enactmentMoment: enactmentMoment), + ); + } + + /// Post the Decision Deposit for a referendum. + /// + /// - `origin`: must be `Signed` and the account must have funds available for the + /// referendum's track's Decision Deposit. + /// - `index`: The index of the submitted referendum whose Decision Deposit is yet to be + /// posted. + /// + /// Emits `DecisionDepositPlaced`. 
+ _i8.TechReferenda placeDecisionDeposit({required int index}) { + return _i8.TechReferenda(_i12.PlaceDecisionDeposit(index: index)); + } + + /// Refund the Decision Deposit for a closed referendum back to the depositor. + /// + /// - `origin`: must be `Signed` or `Root`. + /// - `index`: The index of a closed referendum whose Decision Deposit has not yet been + /// refunded. + /// + /// Emits `DecisionDepositRefunded`. + _i8.TechReferenda refundDecisionDeposit({required int index}) { + return _i8.TechReferenda(_i12.RefundDecisionDeposit(index: index)); + } + + /// Cancel an ongoing referendum. + /// + /// - `origin`: must be the `CancelOrigin`. + /// - `index`: The index of the referendum to be cancelled. + /// + /// Emits `Cancelled`. + _i8.TechReferenda cancel({required int index}) { + return _i8.TechReferenda(_i12.Cancel(index: index)); + } + + /// Cancel an ongoing referendum and slash the deposits. + /// + /// - `origin`: must be the `KillOrigin`. + /// - `index`: The index of the referendum to be cancelled. + /// + /// Emits `Killed` and `DepositSlashed`. + _i8.TechReferenda kill({required int index}) { + return _i8.TechReferenda(_i12.Kill(index: index)); + } + + /// Advance a referendum onto its next logical state. Only used internally. + /// + /// - `origin`: must be `Root`. + /// - `index`: the referendum to be advanced. + _i8.TechReferenda nudgeReferendum({required int index}) { + return _i8.TechReferenda(_i12.NudgeReferendum(index: index)); + } + + /// Advance a track onto its next logical state. Only used internally. + /// + /// - `origin`: must be `Root`. + /// - `track`: the track to be advanced. + /// + /// Action item for when there is now one fewer referendum in the deciding phase and the + /// `DecidingCount` is not yet updated. This means that we should either: + /// - begin deciding another referendum (and leave `DecidingCount` alone); or + /// - decrement `DecidingCount`. 
+ _i8.TechReferenda oneFewerDeciding({required int track}) { + return _i8.TechReferenda(_i12.OneFewerDeciding(track: track)); + } + + /// Refund the Submission Deposit for a closed referendum back to the depositor. + /// + /// - `origin`: must be `Signed` or `Root`. + /// - `index`: The index of a closed referendum whose Submission Deposit has not yet been + /// refunded. + /// + /// Emits `SubmissionDepositRefunded`. + _i8.TechReferenda refundSubmissionDeposit({required int index}) { + return _i8.TechReferenda(_i12.RefundSubmissionDeposit(index: index)); + } + + /// Set or clear metadata of a referendum. + /// + /// Parameters: + /// - `origin`: Must be `Signed` by a creator of a referendum or by anyone to clear a + /// metadata of a finished referendum. + /// - `index`: The index of a referendum to set or clear metadata for. + /// - `maybe_hash`: The hash of an on-chain stored preimage. `None` to clear a metadata. + _i8.TechReferenda setMetadata({required int index, _i5.H256? maybeHash}) { + return _i8.TechReferenda(_i12.SetMetadata(index: index, maybeHash: maybeHash)); + } +} + +class Constants { + Constants(); + + /// The minimum amount to be used as a deposit for a public referendum proposal. + final BigInt submissionDeposit = BigInt.from(100000000000000); + + /// Maximum size of the referendum queue for a single track. + final int maxQueued = 100; + + /// The number of blocks after submission that a referendum must begin being decided by. + /// Once this passes, then anyone may cancel the referendum. + final int undecidingTimeout = 324000; + + /// Quantization level for the referendum wakeup scheduler. A higher number will result in + /// fewer storage reads/writes needed for smaller voters, but also result in delays to the + /// automatic referendum status changes. Explicit servicing instructions are unaffected. + final int alarmInterval = 1; + + /// A list of tracks. 
+ /// + /// Note: if the tracks are dynamic, the value in the static metadata might be inaccurate. + final List<_i4.Tuple2> tracks = [ + _i4.Tuple2( + 0, + _i13.TrackDetails( + name: 'tech_collective_members', + maxDeciding: 1, + decisionDeposit: BigInt.from(1000000000000000), + preparePeriod: 100, + decisionPeriod: 7200, + confirmPeriod: 100, + minEnactmentPeriod: 100, + minApproval: const _i14.LinearDecreasing(length: 1000000000, floor: 500000000, ceil: 1000000000), + minSupport: const _i14.LinearDecreasing(length: 1000000000, floor: 0, ceil: 0), + ), + ), + ]; +} diff --git a/quantus_sdk/lib/generated/planck/pallets/timestamp.dart b/quantus_sdk/lib/generated/planck/pallets/timestamp.dart new file mode 100644 index 00000000..5f716696 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/timestamp.dart @@ -0,0 +1,101 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i3; +import 'dart:typed_data' as _i4; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i2; + +import '../types/pallet_timestamp/pallet/call.dart' as _i6; +import '../types/quantus_runtime/runtime_call.dart' as _i5; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageValue _now = const _i1.StorageValue( + prefix: 'Timestamp', + storage: 'Now', + valueCodec: _i2.U64Codec.codec, + ); + + final _i1.StorageValue _didUpdate = const _i1.StorageValue( + prefix: 'Timestamp', + storage: 'DidUpdate', + valueCodec: _i2.BoolCodec.codec, + ); + + /// The current time for the current block. + _i3.Future now({_i1.BlockHash? at}) async { + final hashedKey = _now.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _now.decodeValue(bytes); + } + return BigInt.zero; /* Default */ + } + + /// Whether the timestamp has been updated in this block. 
+ /// + /// This value is updated to `true` upon successful submission of a timestamp by a node. + /// It is then checked at the end of each block execution in the `on_finalize` hook. + _i3.Future didUpdate({_i1.BlockHash? at}) async { + final hashedKey = _didUpdate.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _didUpdate.decodeValue(bytes); + } + return false; /* Default */ + } + + /// Returns the storage key for `now`. + _i4.Uint8List nowKey() { + final hashedKey = _now.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `didUpdate`. + _i4.Uint8List didUpdateKey() { + final hashedKey = _didUpdate.hashedKey(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Set the current time. + /// + /// This call should be invoked exactly once per block. It will panic at the finalization + /// phase, if this call hasn't been invoked by that time. + /// + /// The timestamp should be greater than the previous one by the amount specified by + /// [`Config::MinimumPeriod`]. + /// + /// The dispatch origin for this call must be _None_. + /// + /// This dispatch class is _Mandatory_ to ensure it gets executed in the block. Be aware + /// that changing the complexity of this call could result exhausting the resources in a + /// block to execute any other calls. + /// + /// ## Complexity + /// - `O(1)` (Note that implementations of `OnTimestampSet` must also be `O(1)`) + /// - 1 storage read and 1 storage mutation (codec `O(1)` because of `DidUpdate::take` in + /// `on_finalize`) + /// - 1 event handler `on_timestamp_set`. Must be `O(1)`. + _i5.Timestamp set({required BigInt now}) { + return _i5.Timestamp(_i6.Set(now: now)); + } +} + +class Constants { + Constants(); + + /// The minimum period between blocks. + /// + /// Be aware that this is different to the *expected* period that the block production + /// apparatus provides. 
Your chosen consensus system will generally work with this to + /// determine a sensible block time. For example, in the Aura pallet it will be double this + /// period on default settings. + final BigInt minimumPeriod = BigInt.from(100); +} diff --git a/quantus_sdk/lib/generated/planck/pallets/transaction_payment.dart b/quantus_sdk/lib/generated/planck/pallets/transaction_payment.dart new file mode 100644 index 00000000..81c2bb8e --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/transaction_payment.dart @@ -0,0 +1,83 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i4; +import 'dart:typed_data' as _i5; + +import 'package:polkadart/polkadart.dart' as _i1; + +import '../types/pallet_transaction_payment/releases.dart' as _i3; +import '../types/sp_arithmetic/fixed_point/fixed_u128.dart' as _i2; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageValue<_i2.FixedU128> _nextFeeMultiplier = const _i1.StorageValue<_i2.FixedU128>( + prefix: 'TransactionPayment', + storage: 'NextFeeMultiplier', + valueCodec: _i2.FixedU128Codec(), + ); + + final _i1.StorageValue<_i3.Releases> _storageVersion = const _i1.StorageValue<_i3.Releases>( + prefix: 'TransactionPayment', + storage: 'StorageVersion', + valueCodec: _i3.Releases.codec, + ); + + _i4.Future<_i2.FixedU128> nextFeeMultiplier({_i1.BlockHash? at}) async { + final hashedKey = _nextFeeMultiplier.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _nextFeeMultiplier.decodeValue(bytes); + } + return BigInt.parse('1000000000000000000', radix: 10); /* Default */ + } + + _i4.Future<_i3.Releases> storageVersion({_i1.BlockHash? 
at}) async { + final hashedKey = _storageVersion.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _storageVersion.decodeValue(bytes); + } + return _i3.Releases.v1Ancient; /* Default */ + } + + /// Returns the storage key for `nextFeeMultiplier`. + _i5.Uint8List nextFeeMultiplierKey() { + final hashedKey = _nextFeeMultiplier.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `storageVersion`. + _i5.Uint8List storageVersionKey() { + final hashedKey = _storageVersion.hashedKey(); + return hashedKey; + } +} + +class Constants { + Constants(); + + /// A fee multiplier for `Operational` extrinsics to compute "virtual tip" to boost their + /// `priority` + /// + /// This value is multiplied by the `final_fee` to obtain a "virtual tip" that is later + /// added to a tip component in regular `priority` calculations. + /// It means that a `Normal` transaction can front-run a similarly-sized `Operational` + /// extrinsic (with no tip), by including a tip value greater than the virtual tip. + /// + /// ```rust,ignore + /// // For `Normal` + /// let priority = priority_calc(tip); + /// + /// // For `Operational` + /// let virtual_tip = (inclusion_fee + tip) * OperationalFeeMultiplier; + /// let priority = priority_calc(tip + virtual_tip); + /// ``` + /// + /// Note that since we use `final_fee` the multiplier applies also to the regular `tip` + /// sent with the transaction. So, not only does the transaction get a priority bump based + /// on the `inclusion_fee`, but we also amplify the impact of tips applied to `Operational` + /// transactions. 
+ final int operationalFeeMultiplier = 5; +} diff --git a/quantus_sdk/lib/generated/planck/pallets/treasury_pallet.dart b/quantus_sdk/lib/generated/planck/pallets/treasury_pallet.dart new file mode 100644 index 00000000..b64782d8 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/treasury_pallet.dart @@ -0,0 +1,74 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i4; +import 'dart:typed_data' as _i5; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i3; + +import '../types/pallet_treasury/pallet/call.dart' as _i7; +import '../types/quantus_runtime/runtime_call.dart' as _i6; +import '../types/sp_core/crypto/account_id32.dart' as _i2; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageValue<_i2.AccountId32> _treasuryAccount = const _i1.StorageValue<_i2.AccountId32>( + prefix: 'TreasuryPallet', + storage: 'TreasuryAccount', + valueCodec: _i2.AccountId32Codec(), + ); + + final _i1.StorageValue _treasuryPortion = const _i1.StorageValue( + prefix: 'TreasuryPallet', + storage: 'TreasuryPortion', + valueCodec: _i3.U8Codec.codec, + ); + + /// The treasury account that receives mining rewards. + _i4.Future<_i2.AccountId32?> treasuryAccount({_i1.BlockHash? at}) async { + final hashedKey = _treasuryAccount.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _treasuryAccount.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// The portion of mining rewards that goes to treasury (0-100). + _i4.Future treasuryPortion({_i1.BlockHash? at}) async { + final hashedKey = _treasuryPortion.hashedKey(); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _treasuryPortion.decodeValue(bytes); + } + return 0; /* Default */ + } + + /// Returns the storage key for `treasuryAccount`. 
+ _i5.Uint8List treasuryAccountKey() { + final hashedKey = _treasuryAccount.hashedKey(); + return hashedKey; + } + + /// Returns the storage key for `treasuryPortion`. + _i5.Uint8List treasuryPortionKey() { + final hashedKey = _treasuryPortion.hashedKey(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Set the treasury account. Root only. + _i6.TreasuryPallet setTreasuryAccount({required _i2.AccountId32 account}) { + return _i6.TreasuryPallet(_i7.SetTreasuryAccount(account: account)); + } + + /// Set the treasury portion (0-100). Root only. + _i6.TreasuryPallet setTreasuryPortion({required int portion}) { + return _i6.TreasuryPallet(_i7.SetTreasuryPortion(portion: portion)); + } +} diff --git a/quantus_sdk/lib/generated/planck/pallets/utility.dart b/quantus_sdk/lib/generated/planck/pallets/utility.dart new file mode 100644 index 00000000..52251630 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/utility.dart @@ -0,0 +1,145 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import '../types/pallet_utility/pallet/call.dart' as _i2; +import '../types/quantus_runtime/origin_caller.dart' as _i3; +import '../types/quantus_runtime/runtime_call.dart' as _i1; +import '../types/sp_weights/weight_v2/weight.dart' as _i4; + +class Txs { + const Txs(); + + /// Send a batch of dispatch calls. + /// + /// May be called from any origin except `None`. + /// + /// - `calls`: The calls to be dispatched from the same origin. The number of call must not + /// exceed the constant: `batched_calls_limit` (available in constant metadata). + /// + /// If origin is root then the calls are dispatched without checking origin filter. (This + /// includes bypassing `frame_system::Config::BaseCallFilter`). + /// + /// ## Complexity + /// - O(C) where C is the number of calls to be batched. + /// + /// This will return `Ok` in all circumstances. To determine the success of the batch, an + /// event is deposited. 
If a call failed and the batch was interrupted, then the + /// `BatchInterrupted` event is deposited, along with the number of successful calls made + /// and the error of the failed call. If all were successful, then the `BatchCompleted` + /// event is deposited. + _i1.Utility batch({required List<_i1.RuntimeCall> calls}) { + return _i1.Utility(_i2.Batch(calls: calls)); + } + + /// Send a call through an indexed pseudonym of the sender. + /// + /// Filter from origin are passed along. The call will be dispatched with an origin which + /// use the same filter as the origin of this call. + /// + /// NOTE: If you need to ensure that any account-based filtering is not honored (i.e. + /// because you expect `proxy` to have been used prior in the call stack and you do not want + /// the call restrictions to apply to any sub-accounts), then use `as_multi_threshold_1` + /// in the Multisig pallet instead. + /// + /// NOTE: Prior to version *12, this was called `as_limited_sub`. + /// + /// The dispatch origin for this call must be _Signed_. + _i1.Utility asDerivative({required int index, required _i1.RuntimeCall call}) { + return _i1.Utility(_i2.AsDerivative(index: index, call: call)); + } + + /// Send a batch of dispatch calls and atomically execute them. + /// The whole transaction will rollback and fail if any of the calls failed. + /// + /// May be called from any origin except `None`. + /// + /// - `calls`: The calls to be dispatched from the same origin. The number of call must not + /// exceed the constant: `batched_calls_limit` (available in constant metadata). + /// + /// If origin is root then the calls are dispatched without checking origin filter. (This + /// includes bypassing `frame_system::Config::BaseCallFilter`). + /// + /// ## Complexity + /// - O(C) where C is the number of calls to be batched. 
+ _i1.Utility batchAll({required List<_i1.RuntimeCall> calls}) { + return _i1.Utility(_i2.BatchAll(calls: calls)); + } + + /// Dispatches a function call with a provided origin. + /// + /// The dispatch origin for this call must be _Root_. + /// + /// ## Complexity + /// - O(1). + _i1.Utility dispatchAs({required _i3.OriginCaller asOrigin, required _i1.RuntimeCall call}) { + return _i1.Utility(_i2.DispatchAs(asOrigin: asOrigin, call: call)); + } + + /// Send a batch of dispatch calls. + /// Unlike `batch`, it allows errors and won't interrupt. + /// + /// May be called from any origin except `None`. + /// + /// - `calls`: The calls to be dispatched from the same origin. The number of call must not + /// exceed the constant: `batched_calls_limit` (available in constant metadata). + /// + /// If origin is root then the calls are dispatch without checking origin filter. (This + /// includes bypassing `frame_system::Config::BaseCallFilter`). + /// + /// ## Complexity + /// - O(C) where C is the number of calls to be batched. + _i1.Utility forceBatch({required List<_i1.RuntimeCall> calls}) { + return _i1.Utility(_i2.ForceBatch(calls: calls)); + } + + /// Dispatch a function call with a specified weight. + /// + /// This function does not check the weight of the call, and instead allows the + /// Root origin to specify the weight of the call. + /// + /// The dispatch origin for this call must be _Root_. + _i1.Utility withWeight({required _i1.RuntimeCall call, required _i4.Weight weight}) { + return _i1.Utility(_i2.WithWeight(call: call, weight: weight)); + } + + /// Dispatch a fallback call in the event the main call fails to execute. + /// May be called from any origin except `None`. + /// + /// This function first attempts to dispatch the `main` call. + /// If the `main` call fails, the `fallback` is attemted. + /// if the fallback is successfully dispatched, the weights of both calls + /// are accumulated and an event containing the main call error is deposited. 
+ /// + /// In the event of a fallback failure the whole call fails + /// with the weights returned. + /// + /// - `main`: The main call to be dispatched. This is the primary action to execute. + /// - `fallback`: The fallback call to be dispatched in case the `main` call fails. + /// + /// ## Dispatch Logic + /// - If the origin is `root`, both the main and fallback calls are executed without + /// applying any origin filters. + /// - If the origin is not `root`, the origin filter is applied to both the `main` and + /// `fallback` calls. + /// + /// ## Use Case + /// - Some use cases might involve submitting a `batch` type call in either main, fallback + /// or both. + _i1.Utility ifElse({required _i1.RuntimeCall main, required _i1.RuntimeCall fallback}) { + return _i1.Utility(_i2.IfElse(main: main, fallback: fallback)); + } + + /// Dispatches a function call with a provided origin. + /// + /// Almost the same as [`Pallet::dispatch_as`] but forwards any error of the inner call. + /// + /// The dispatch origin for this call must be _Root_. + _i1.Utility dispatchAsFallible({required _i3.OriginCaller asOrigin, required _i1.RuntimeCall call}) { + return _i1.Utility(_i2.DispatchAsFallible(asOrigin: asOrigin, call: call)); + } +} + +class Constants { + Constants(); + + /// The limit on the number of batched calls. 
+ final int batchedCallsLimit = 10922; +} diff --git a/quantus_sdk/lib/generated/planck/pallets/wormhole.dart b/quantus_sdk/lib/generated/planck/pallets/wormhole.dart new file mode 100644 index 00000000..781698c9 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/pallets/wormhole.dart @@ -0,0 +1,209 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i5; +import 'dart:typed_data' as _i6; + +import 'package:polkadart/polkadart.dart' as _i1; +import 'package:polkadart/scale_codec.dart' as _i2; + +import '../types/pallet_wormhole/pallet/call.dart' as _i8; +import '../types/quantus_runtime/runtime_call.dart' as _i7; +import '../types/sp_arithmetic/per_things/permill.dart' as _i9; +import '../types/sp_core/crypto/account_id32.dart' as _i4; +import '../types/tuples_3.dart' as _i3; + +class Queries { + const Queries(this.__api); + + final _i1.StateApi __api; + + final _i1.StorageMap, bool> _usedNullifiers = const _i1.StorageMap, bool>( + prefix: 'Wormhole', + storage: 'UsedNullifiers', + valueCodec: _i2.BoolCodec.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i2.U8ArrayCodec(32)), + ); + + final _i1.StorageMap<_i3.Tuple5, dynamic> _transferProof = + const _i1.StorageMap<_i3.Tuple5, dynamic>( + prefix: 'Wormhole', + storage: 'TransferProof', + valueCodec: _i2.NullCodec.codec, + hasher: _i1.StorageHasher.identity( + _i3.Tuple5Codec( + _i2.U32Codec.codec, + _i2.U64Codec.codec, + _i4.AccountId32Codec(), + _i4.AccountId32Codec(), + _i2.U128Codec.codec, + ), + ), + ); + + final _i1.StorageMap<_i4.AccountId32, BigInt> _transferCount = const _i1.StorageMap<_i4.AccountId32, BigInt>( + prefix: 'Wormhole', + storage: 'TransferCount', + valueCodec: _i2.U64Codec.codec, + hasher: _i1.StorageHasher.blake2b128Concat(_i4.AccountId32Codec()), + ); + + _i5.Future usedNullifiers(List key1, {_i1.BlockHash? 
at}) async { + final hashedKey = _usedNullifiers.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _usedNullifiers.decodeValue(bytes); + } + return false; /* Default */ + } + + /// Transfer proofs for wormhole transfers (both native and assets) + _i5.Future transferProof( + _i3.Tuple5 key1, { + _i1.BlockHash? at, + }) async { + final hashedKey = _transferProof.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _transferProof.decodeValue(bytes); + } + return null; /* Nullable */ + } + + /// Transfer count for all wormhole transfers + _i5.Future transferCount(_i4.AccountId32 key1, {_i1.BlockHash? at}) async { + final hashedKey = _transferCount.hashedKeyFor(key1); + final bytes = await __api.getStorage(hashedKey, at: at); + if (bytes != null) { + return _transferCount.decodeValue(bytes); + } + return BigInt.zero; /* Default */ + } + + _i5.Future> multiUsedNullifiers(List> keys, {_i1.BlockHash? at}) async { + final hashedKeys = keys.map((key) => _usedNullifiers.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _usedNullifiers.decodeValue(v.key)).toList(); + } + return (keys.map((key) => false).toList() as List); /* Default */ + } + + /// Transfer proofs for wormhole transfers (both native and assets) + _i5.Future> multiTransferProof( + List<_i3.Tuple5> keys, { + _i1.BlockHash? at, + }) async { + final hashedKeys = keys.map((key) => _transferProof.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _transferProof.decodeValue(v.key)).toList(); + } + return []; /* Nullable */ + } + + /// Transfer count for all wormhole transfers + _i5.Future> multiTransferCount(List<_i4.AccountId32> keys, {_i1.BlockHash? 
at}) async { + final hashedKeys = keys.map((key) => _transferCount.hashedKeyFor(key)).toList(); + final bytes = await __api.queryStorageAt(hashedKeys, at: at); + if (bytes.isNotEmpty) { + return bytes.first.changes.map((v) => _transferCount.decodeValue(v.key)).toList(); + } + return (keys.map((key) => BigInt.zero).toList() as List); /* Default */ + } + + /// Returns the storage key for `usedNullifiers`. + _i6.Uint8List usedNullifiersKey(List key1) { + final hashedKey = _usedNullifiers.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `transferProof`. + _i6.Uint8List transferProofKey(_i3.Tuple5 key1) { + final hashedKey = _transferProof.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage key for `transferCount`. + _i6.Uint8List transferCountKey(_i4.AccountId32 key1) { + final hashedKey = _transferCount.hashedKeyFor(key1); + return hashedKey; + } + + /// Returns the storage map key prefix for `usedNullifiers`. + _i6.Uint8List usedNullifiersMapPrefix() { + final hashedKey = _usedNullifiers.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `transferProof`. + _i6.Uint8List transferProofMapPrefix() { + final hashedKey = _transferProof.mapPrefix(); + return hashedKey; + } + + /// Returns the storage map key prefix for `transferCount`. 
+ _i6.Uint8List transferCountMapPrefix() { + final hashedKey = _transferCount.mapPrefix(); + return hashedKey; + } +} + +class Txs { + const Txs(); + + /// Verify an aggregated wormhole proof and process all transfers in the batch + _i7.Wormhole verifyAggregatedProof({required List proofBytes}) { + return _i7.Wormhole(_i8.VerifyAggregatedProof(proofBytes: proofBytes)); + } +} + +class Constants { + Constants(); + + /// Account ID used as the "from" account when creating transfer proofs for minted tokens + final _i4.AccountId32 mintingAccount = const [ + 109, + 111, + 100, + 108, + 119, + 111, + 114, + 109, + 104, + 111, + 108, + 101, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + ]; + + /// Minimum transfer amount required for wormhole transfers. + /// This prevents dust transfers that waste storage. + final BigInt minimumTransferAmount = BigInt.from(10000000000000); + + /// Volume fee rate in basis points (1 basis point = 0.01%). + /// This must match the fee rate used in proof generation. + final int volumeFeeRateBps = 10; + + /// Proportion of volume fees to burn (not mint). The remainder goes to the block author. + /// Example: Permill::from_percent(50) means 50% burned, 50% to miner. 
+ final _i9.Permill volumeFeesBurnRate = 500000; +} diff --git a/quantus_sdk/lib/generated/planck/planck.dart b/quantus_sdk/lib/generated/planck/planck.dart new file mode 100644 index 00000000..78af801a --- /dev/null +++ b/quantus_sdk/lib/generated/planck/planck.dart @@ -0,0 +1,224 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:async' as _i23; + +import 'package:polkadart/polkadart.dart' as _i1; + +import 'pallets/assets.dart' as _i18; +import 'pallets/assets_holder.dart' as _i19; +import 'pallets/balances.dart' as _i4; +import 'pallets/conviction_voting.dart' as _i13; +import 'pallets/mining_rewards.dart' as _i8; +import 'pallets/multisig.dart' as _i20; +import 'pallets/preimage.dart' as _i9; +import 'pallets/q_po_w.dart' as _i7; +import 'pallets/recovery.dart' as _i17; +import 'pallets/referenda.dart' as _i11; +import 'pallets/reversible_transfers.dart' as _i12; +import 'pallets/scheduler.dart' as _i10; +import 'pallets/sudo.dart' as _i6; +import 'pallets/system.dart' as _i2; +import 'pallets/tech_collective.dart' as _i14; +import 'pallets/tech_referenda.dart' as _i15; +import 'pallets/timestamp.dart' as _i3; +import 'pallets/transaction_payment.dart' as _i5; +import 'pallets/treasury_pallet.dart' as _i16; +import 'pallets/utility.dart' as _i22; +import 'pallets/wormhole.dart' as _i21; + +class Queries { + Queries(_i1.StateApi api) + : system = _i2.Queries(api), + timestamp = _i3.Queries(api), + balances = _i4.Queries(api), + transactionPayment = _i5.Queries(api), + sudo = _i6.Queries(api), + qPoW = _i7.Queries(api), + miningRewards = _i8.Queries(api), + preimage = _i9.Queries(api), + scheduler = _i10.Queries(api), + referenda = _i11.Queries(api), + reversibleTransfers = _i12.Queries(api), + convictionVoting = _i13.Queries(api), + techCollective = _i14.Queries(api), + techReferenda = _i15.Queries(api), + treasuryPallet = _i16.Queries(api), + recovery = _i17.Queries(api), + assets = _i18.Queries(api), + assetsHolder = 
_i19.Queries(api), + multisig = _i20.Queries(api), + wormhole = _i21.Queries(api); + + final _i2.Queries system; + + final _i3.Queries timestamp; + + final _i4.Queries balances; + + final _i5.Queries transactionPayment; + + final _i6.Queries sudo; + + final _i7.Queries qPoW; + + final _i8.Queries miningRewards; + + final _i9.Queries preimage; + + final _i10.Queries scheduler; + + final _i11.Queries referenda; + + final _i12.Queries reversibleTransfers; + + final _i13.Queries convictionVoting; + + final _i14.Queries techCollective; + + final _i15.Queries techReferenda; + + final _i16.Queries treasuryPallet; + + final _i17.Queries recovery; + + final _i18.Queries assets; + + final _i19.Queries assetsHolder; + + final _i20.Queries multisig; + + final _i21.Queries wormhole; +} + +class Extrinsics { + Extrinsics(); + + final _i2.Txs system = _i2.Txs(); + + final _i3.Txs timestamp = _i3.Txs(); + + final _i4.Txs balances = _i4.Txs(); + + final _i6.Txs sudo = _i6.Txs(); + + final _i9.Txs preimage = _i9.Txs(); + + final _i10.Txs scheduler = _i10.Txs(); + + final _i22.Txs utility = _i22.Txs(); + + final _i11.Txs referenda = _i11.Txs(); + + final _i12.Txs reversibleTransfers = _i12.Txs(); + + final _i13.Txs convictionVoting = _i13.Txs(); + + final _i14.Txs techCollective = _i14.Txs(); + + final _i15.Txs techReferenda = _i15.Txs(); + + final _i16.Txs treasuryPallet = _i16.Txs(); + + final _i17.Txs recovery = _i17.Txs(); + + final _i18.Txs assets = _i18.Txs(); + + final _i20.Txs multisig = _i20.Txs(); + + final _i21.Txs wormhole = _i21.Txs(); +} + +class Constants { + Constants(); + + final _i2.Constants system = _i2.Constants(); + + final _i3.Constants timestamp = _i3.Constants(); + + final _i4.Constants balances = _i4.Constants(); + + final _i5.Constants transactionPayment = _i5.Constants(); + + final _i7.Constants qPoW = _i7.Constants(); + + final _i8.Constants miningRewards = _i8.Constants(); + + final _i10.Constants scheduler = _i10.Constants(); + + final _i22.Constants 
utility = _i22.Constants(); + + final _i11.Constants referenda = _i11.Constants(); + + final _i12.Constants reversibleTransfers = _i12.Constants(); + + final _i13.Constants convictionVoting = _i13.Constants(); + + final _i15.Constants techReferenda = _i15.Constants(); + + final _i17.Constants recovery = _i17.Constants(); + + final _i18.Constants assets = _i18.Constants(); + + final _i20.Constants multisig = _i20.Constants(); + + final _i21.Constants wormhole = _i21.Constants(); +} + +class Rpc { + const Rpc({required this.state, required this.system}); + + final _i1.StateApi state; + + final _i1.SystemApi system; +} + +class Registry { + Registry(); + + final int extrinsicVersion = 4; + + List getSignedExtensionTypes() { + return ['CheckMortality', 'CheckNonce', 'ChargeTransactionPayment', 'CheckMetadataHash']; + } + + List getSignedExtensionExtra() { + return ['CheckSpecVersion', 'CheckTxVersion', 'CheckGenesis', 'CheckMortality', 'CheckMetadataHash']; + } +} + +class Planck { + Planck._(this._provider, this.rpc) + : query = Queries(rpc.state), + constant = Constants(), + tx = Extrinsics(), + registry = Registry(); + + factory Planck(_i1.Provider provider) { + final rpc = Rpc(state: _i1.StateApi(provider), system: _i1.SystemApi(provider)); + return Planck._(provider, rpc); + } + + factory Planck.url(Uri url) { + final provider = _i1.Provider.fromUri(url); + return Planck(provider); + } + + final _i1.Provider _provider; + + final Queries query; + + final Constants constant; + + final Rpc rpc; + + final Extrinsics tx; + + final Registry registry; + + _i23.Future connect() async { + return await _provider.connect(); + } + + _i23.Future disconnect() async { + return await _provider.disconnect(); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/b_tree_map.dart b/quantus_sdk/lib/generated/planck/types/b_tree_map.dart new file mode 100644 index 00000000..25b6b30c --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/b_tree_map.dart @@ -0,0 +1,32 @@ +// 
ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i3; + +import 'sp_core/crypto/account_id32.dart' as _i2; +import 'tuples.dart' as _i1; + +typedef BTreeMap = List<_i1.Tuple2<_i2.AccountId32, int>>; + +class BTreeMapCodec with _i3.Codec { + const BTreeMapCodec(); + + @override + BTreeMap decode(_i3.Input input) { + return const _i3.SequenceCodec<_i1.Tuple2<_i2.AccountId32, int>>( + _i1.Tuple2Codec<_i2.AccountId32, int>(_i2.AccountId32Codec(), _i3.U32Codec.codec), + ).decode(input); + } + + @override + void encodeTo(BTreeMap value, _i3.Output output) { + const _i3.SequenceCodec<_i1.Tuple2<_i2.AccountId32, int>>( + _i1.Tuple2Codec<_i2.AccountId32, int>(_i2.AccountId32Codec(), _i3.U32Codec.codec), + ).encodeTo(value, output); + } + + @override + int sizeHint(BTreeMap value) { + return const _i3.SequenceCodec<_i1.Tuple2<_i2.AccountId32, int>>( + _i1.Tuple2Codec<_i2.AccountId32, int>(_i2.AccountId32Codec(), _i3.U32Codec.codec), + ).sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/bounded_collections/bounded_btree_map/bounded_b_tree_map.dart b/quantus_sdk/lib/generated/planck/types/bounded_collections/bounded_btree_map/bounded_b_tree_map.dart new file mode 100644 index 00000000..7b21a5cb --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/bounded_collections/bounded_btree_map/bounded_b_tree_map.dart @@ -0,0 +1,31 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i2; + +import '../../b_tree_map.dart' as _i1; +import '../../sp_core/crypto/account_id32.dart' as _i4; +import '../../tuples.dart' as _i3; + +typedef BoundedBTreeMap = _i1.BTreeMap; + +class BoundedBTreeMapCodec with _i2.Codec { + const BoundedBTreeMapCodec(); + + @override + BoundedBTreeMap decode(_i2.Input input) { + return const _i2.SequenceCodec<_i3.Tuple2<_i4.AccountId32, int>>( + _i3.Tuple2Codec<_i4.AccountId32, int>(_i4.AccountId32Codec(), 
_i2.U32Codec.codec), + ).decode(input); + } + + @override + void encodeTo(BoundedBTreeMap value, _i2.Output output) { + const _i2.SequenceCodec<_i3.Tuple2<_i4.AccountId32, int>>( + _i3.Tuple2Codec<_i4.AccountId32, int>(_i4.AccountId32Codec(), _i2.U32Codec.codec), + ).encodeTo(value, output); + } + + @override + int sizeHint(BoundedBTreeMap value) { + return const _i1.BTreeMapCodec().sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/cow_1.dart b/quantus_sdk/lib/generated/planck/types/cow_1.dart new file mode 100644 index 00000000..deb05241 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/cow_1.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef Cow = String; + +class CowCodec with _i1.Codec { + const CowCodec(); + + @override + Cow decode(_i1.Input input) { + return _i1.StrCodec.codec.decode(input); + } + + @override + void encodeTo(Cow value, _i1.Output output) { + _i1.StrCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(Cow value) { + return _i1.StrCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/cow_2.dart b/quantus_sdk/lib/generated/planck/types/cow_2.dart new file mode 100644 index 00000000..5077d888 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/cow_2.dart @@ -0,0 +1,31 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i2; + +import 'tuples.dart' as _i1; + +typedef Cow = List<_i1.Tuple2, int>>; + +class CowCodec with _i2.Codec { + const CowCodec(); + + @override + Cow decode(_i2.Input input) { + return const _i2.SequenceCodec<_i1.Tuple2, int>>( + _i1.Tuple2Codec, int>(_i2.U8ArrayCodec(8), _i2.U32Codec.codec), + ).decode(input); + } + + @override + void encodeTo(Cow value, _i2.Output output) { + const _i2.SequenceCodec<_i1.Tuple2, int>>( + _i1.Tuple2Codec, int>(_i2.U8ArrayCodec(8), 
_i2.U32Codec.codec), + ).encodeTo(value, output); + } + + @override + int sizeHint(Cow value) { + return const _i2.SequenceCodec<_i1.Tuple2, int>>( + _i1.Tuple2Codec, int>(_i2.U8ArrayCodec(8), _i2.U32Codec.codec), + ).sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_metadata_hash_extension/check_metadata_hash.dart b/quantus_sdk/lib/generated/planck/types/frame_metadata_hash_extension/check_metadata_hash.dart new file mode 100644 index 00000000..58d56ebe --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_metadata_hash_extension/check_metadata_hash.dart @@ -0,0 +1,52 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import 'mode.dart' as _i2; + +class CheckMetadataHash { + const CheckMetadataHash({required this.mode}); + + factory CheckMetadataHash.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Mode + final _i2.Mode mode; + + static const $CheckMetadataHashCodec codec = $CheckMetadataHashCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'mode': mode.toJson()}; + + @override + bool operator ==(Object other) => identical(this, other) || other is CheckMetadataHash && other.mode == mode; + + @override + int get hashCode => mode.hashCode; +} + +class $CheckMetadataHashCodec with _i1.Codec { + const $CheckMetadataHashCodec(); + + @override + void encodeTo(CheckMetadataHash obj, _i1.Output output) { + _i2.Mode.codec.encodeTo(obj.mode, output); + } + + @override + CheckMetadataHash decode(_i1.Input input) { + return CheckMetadataHash(mode: _i2.Mode.codec.decode(input)); + } + + @override + int sizeHint(CheckMetadataHash obj) { + int size = 0; + size = size + _i2.Mode.codec.sizeHint(obj.mode); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_metadata_hash_extension/mode.dart 
b/quantus_sdk/lib/generated/planck/types/frame_metadata_hash_extension/mode.dart new file mode 100644 index 00000000..5ec6e0eb --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_metadata_hash_extension/mode.dart @@ -0,0 +1,49 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum Mode { + disabled('Disabled', 0), + enabled('Enabled', 1); + + const Mode(this.variantName, this.codecIndex); + + factory Mode.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ModeCodec codec = $ModeCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ModeCodec with _i1.Codec { + const $ModeCodec(); + + @override + Mode decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Mode.disabled; + case 1: + return Mode.enabled; + default: + throw Exception('Mode: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Mode value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/dispatch_class.dart b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/dispatch_class.dart new file mode 100644 index 00000000..e4f8be5b --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/dispatch_class.dart @@ -0,0 +1,52 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum DispatchClass { + normal('Normal', 0), + operational('Operational', 1), + mandatory('Mandatory', 2); + + const DispatchClass(this.variantName, this.codecIndex); + + factory DispatchClass.decode(_i1.Input input) { + return codec.decode(input); + } + 
+ final String variantName; + + final int codecIndex; + + static const $DispatchClassCodec codec = $DispatchClassCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $DispatchClassCodec with _i1.Codec { + const $DispatchClassCodec(); + + @override + DispatchClass decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return DispatchClass.normal; + case 1: + return DispatchClass.operational; + case 2: + return DispatchClass.mandatory; + default: + throw Exception('DispatchClass: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(DispatchClass value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/pays.dart b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/pays.dart new file mode 100644 index 00000000..05394649 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/pays.dart @@ -0,0 +1,49 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum Pays { + yes('Yes', 0), + no('No', 1); + + const Pays(this.variantName, this.codecIndex); + + factory Pays.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $PaysCodec codec = $PaysCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $PaysCodec with _i1.Codec { + const $PaysCodec(); + + @override + Pays decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Pays.yes; + case 1: + return Pays.no; + default: + throw Exception('Pays: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Pays value, _i1.Output output) 
{ + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/per_dispatch_class_1.dart b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/per_dispatch_class_1.dart new file mode 100644 index 00000000..7f410127 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/per_dispatch_class_1.dart @@ -0,0 +1,75 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../sp_weights/weight_v2/weight.dart' as _i2; + +class PerDispatchClass { + const PerDispatchClass({required this.normal, required this.operational, required this.mandatory}); + + factory PerDispatchClass.decode(_i1.Input input) { + return codec.decode(input); + } + + /// T + final _i2.Weight normal; + + /// T + final _i2.Weight operational; + + /// T + final _i2.Weight mandatory; + + static const $PerDispatchClassCodec codec = $PerDispatchClassCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map> toJson() => { + 'normal': normal.toJson(), + 'operational': operational.toJson(), + 'mandatory': mandatory.toJson(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is PerDispatchClass && + other.normal == normal && + other.operational == operational && + other.mandatory == mandatory; + + @override + int get hashCode => Object.hash(normal, operational, mandatory); +} + +class $PerDispatchClassCodec with _i1.Codec { + const $PerDispatchClassCodec(); + + @override + void encodeTo(PerDispatchClass obj, _i1.Output output) { + _i2.Weight.codec.encodeTo(obj.normal, output); + _i2.Weight.codec.encodeTo(obj.operational, output); + _i2.Weight.codec.encodeTo(obj.mandatory, output); + } + + @override + PerDispatchClass decode(_i1.Input input) { + return PerDispatchClass( + normal: _i2.Weight.codec.decode(input), + operational: 
_i2.Weight.codec.decode(input), + mandatory: _i2.Weight.codec.decode(input), + ); + } + + @override + int sizeHint(PerDispatchClass obj) { + int size = 0; + size = size + _i2.Weight.codec.sizeHint(obj.normal); + size = size + _i2.Weight.codec.sizeHint(obj.operational); + size = size + _i2.Weight.codec.sizeHint(obj.mandatory); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/per_dispatch_class_2.dart b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/per_dispatch_class_2.dart new file mode 100644 index 00000000..4055b7ae --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/per_dispatch_class_2.dart @@ -0,0 +1,75 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../frame_system/limits/weights_per_class.dart' as _i2; + +class PerDispatchClass { + const PerDispatchClass({required this.normal, required this.operational, required this.mandatory}); + + factory PerDispatchClass.decode(_i1.Input input) { + return codec.decode(input); + } + + /// T + final _i2.WeightsPerClass normal; + + /// T + final _i2.WeightsPerClass operational; + + /// T + final _i2.WeightsPerClass mandatory; + + static const $PerDispatchClassCodec codec = $PerDispatchClassCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map?>> toJson() => { + 'normal': normal.toJson(), + 'operational': operational.toJson(), + 'mandatory': mandatory.toJson(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is PerDispatchClass && + other.normal == normal && + other.operational == operational && + other.mandatory == mandatory; + + @override + int get hashCode => Object.hash(normal, operational, mandatory); +} + +class $PerDispatchClassCodec with _i1.Codec { + const $PerDispatchClassCodec(); + + @override + void encodeTo(PerDispatchClass obj, 
_i1.Output output) { + _i2.WeightsPerClass.codec.encodeTo(obj.normal, output); + _i2.WeightsPerClass.codec.encodeTo(obj.operational, output); + _i2.WeightsPerClass.codec.encodeTo(obj.mandatory, output); + } + + @override + PerDispatchClass decode(_i1.Input input) { + return PerDispatchClass( + normal: _i2.WeightsPerClass.codec.decode(input), + operational: _i2.WeightsPerClass.codec.decode(input), + mandatory: _i2.WeightsPerClass.codec.decode(input), + ); + } + + @override + int sizeHint(PerDispatchClass obj) { + int size = 0; + size = size + _i2.WeightsPerClass.codec.sizeHint(obj.normal); + size = size + _i2.WeightsPerClass.codec.sizeHint(obj.operational); + size = size + _i2.WeightsPerClass.codec.sizeHint(obj.mandatory); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/per_dispatch_class_3.dart b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/per_dispatch_class_3.dart new file mode 100644 index 00000000..a45897bd --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/per_dispatch_class_3.dart @@ -0,0 +1,69 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +class PerDispatchClass { + const PerDispatchClass({required this.normal, required this.operational, required this.mandatory}); + + factory PerDispatchClass.decode(_i1.Input input) { + return codec.decode(input); + } + + /// T + final int normal; + + /// T + final int operational; + + /// T + final int mandatory; + + static const $PerDispatchClassCodec codec = $PerDispatchClassCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'normal': normal, 'operational': operational, 'mandatory': mandatory}; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is PerDispatchClass && + other.normal == normal && + other.operational == operational && + 
other.mandatory == mandatory; + + @override + int get hashCode => Object.hash(normal, operational, mandatory); +} + +class $PerDispatchClassCodec with _i1.Codec { + const $PerDispatchClassCodec(); + + @override + void encodeTo(PerDispatchClass obj, _i1.Output output) { + _i1.U32Codec.codec.encodeTo(obj.normal, output); + _i1.U32Codec.codec.encodeTo(obj.operational, output); + _i1.U32Codec.codec.encodeTo(obj.mandatory, output); + } + + @override + PerDispatchClass decode(_i1.Input input) { + return PerDispatchClass( + normal: _i1.U32Codec.codec.decode(input), + operational: _i1.U32Codec.codec.decode(input), + mandatory: _i1.U32Codec.codec.decode(input), + ); + } + + @override + int sizeHint(PerDispatchClass obj) { + int size = 0; + size = size + _i1.U32Codec.codec.sizeHint(obj.normal); + size = size + _i1.U32Codec.codec.sizeHint(obj.operational); + size = size + _i1.U32Codec.codec.sizeHint(obj.mandatory); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/post_dispatch_info.dart b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/post_dispatch_info.dart new file mode 100644 index 00000000..05dbdaed --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/post_dispatch_info.dart @@ -0,0 +1,63 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i4; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../sp_weights/weight_v2/weight.dart' as _i2; +import 'pays.dart' as _i3; + +class PostDispatchInfo { + const PostDispatchInfo({this.actualWeight, required this.paysFee}); + + factory PostDispatchInfo.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Option + final _i2.Weight? 
actualWeight; + + /// Pays + final _i3.Pays paysFee; + + static const $PostDispatchInfoCodec codec = $PostDispatchInfoCodec(); + + _i4.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'actualWeight': actualWeight?.toJson(), 'paysFee': paysFee.toJson()}; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is PostDispatchInfo && other.actualWeight == actualWeight && other.paysFee == paysFee; + + @override + int get hashCode => Object.hash(actualWeight, paysFee); +} + +class $PostDispatchInfoCodec with _i1.Codec { + const $PostDispatchInfoCodec(); + + @override + void encodeTo(PostDispatchInfo obj, _i1.Output output) { + const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).encodeTo(obj.actualWeight, output); + _i3.Pays.codec.encodeTo(obj.paysFee, output); + } + + @override + PostDispatchInfo decode(_i1.Input input) { + return PostDispatchInfo( + actualWeight: const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).decode(input), + paysFee: _i3.Pays.codec.decode(input), + ); + } + + @override + int sizeHint(PostDispatchInfo obj) { + int size = 0; + size = size + const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).sizeHint(obj.actualWeight); + size = size + _i3.Pays.codec.sizeHint(obj.paysFee); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/raw_origin.dart b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/raw_origin.dart new file mode 100644 index 00000000..c329c2b4 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/dispatch/raw_origin.dart @@ -0,0 +1,190 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../sp_core/crypto/account_id32.dart' as _i3; + +abstract class RawOrigin { + const RawOrigin(); + + factory RawOrigin.decode(_i1.Input input) { + return 
codec.decode(input); + } + + static const $RawOriginCodec codec = $RawOriginCodec(); + + static const $RawOrigin values = $RawOrigin(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $RawOrigin { + const $RawOrigin(); + + Root root() { + return Root(); + } + + Signed signed(_i3.AccountId32 value0) { + return Signed(value0); + } + + None none() { + return None(); + } + + Authorized authorized() { + return Authorized(); + } +} + +class $RawOriginCodec with _i1.Codec { + const $RawOriginCodec(); + + @override + RawOrigin decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return const Root(); + case 1: + return Signed._decode(input); + case 2: + return const None(); + case 3: + return const Authorized(); + default: + throw Exception('RawOrigin: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(RawOrigin value, _i1.Output output) { + switch (value.runtimeType) { + case Root: + (value as Root).encodeTo(output); + break; + case Signed: + (value as Signed).encodeTo(output); + break; + case None: + (value as None).encodeTo(output); + break; + case Authorized: + (value as Authorized).encodeTo(output); + break; + default: + throw Exception('RawOrigin: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(RawOrigin value) { + switch (value.runtimeType) { + case Root: + return 1; + case Signed: + return (value as Signed)._sizeHint(); + case None: + return 1; + case Authorized: + return 1; + default: + throw Exception('RawOrigin: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Root extends RawOrigin { + const Root(); + + @override + Map toJson() => {'Root': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + } + + 
@override + bool operator ==(Object other) => other is Root; + + @override + int get hashCode => runtimeType.hashCode; +} + +class Signed extends RawOrigin { + const Signed(this.value0); + + factory Signed._decode(_i1.Input input) { + return Signed(const _i1.U8ArrayCodec(32).decode(input)); + } + + /// AccountId + final _i3.AccountId32 value0; + + @override + Map> toJson() => {'Signed': value0.toList()}; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Signed && _i4.listsEqual(other.value0, value0); + + @override + int get hashCode => value0.hashCode; +} + +class None extends RawOrigin { + const None(); + + @override + Map toJson() => {'None': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + } + + @override + bool operator ==(Object other) => other is None; + + @override + int get hashCode => runtimeType.hashCode; +} + +class Authorized extends RawOrigin { + const Authorized(); + + @override + Map toJson() => {'Authorized': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + } + + @override + bool operator ==(Object other) => other is Authorized; + + @override + int get hashCode => runtimeType.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/pallet_id.dart b/quantus_sdk/lib/generated/planck/types/frame_support/pallet_id.dart new file mode 100644 index 00000000..ae6812ea --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/pallet_id.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef PalletId = List; + +class PalletIdCodec with _i1.Codec { + const 
PalletIdCodec(); + + @override + PalletId decode(_i1.Input input) { + return const _i1.U8ArrayCodec(8).decode(input); + } + + @override + void encodeTo(PalletId value, _i1.Output output) { + const _i1.U8ArrayCodec(8).encodeTo(value, output); + } + + @override + int sizeHint(PalletId value) { + return const _i1.U8ArrayCodec(8).sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/traits/preimages/bounded.dart b/quantus_sdk/lib/generated/planck/types/frame_support/traits/preimages/bounded.dart new file mode 100644 index 00000000..289a499b --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/traits/preimages/bounded.dart @@ -0,0 +1,200 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../../primitive_types/h256.dart' as _i3; + +abstract class Bounded { + const Bounded(); + + factory Bounded.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $BoundedCodec codec = $BoundedCodec(); + + static const $Bounded values = $Bounded(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $Bounded { + const $Bounded(); + + Legacy legacy({required _i3.H256 hash}) { + return Legacy(hash: hash); + } + + Inline inline(List value0) { + return Inline(value0); + } + + Lookup lookup({required _i3.H256 hash, required int len}) { + return Lookup(hash: hash, len: len); + } +} + +class $BoundedCodec with _i1.Codec { + const $BoundedCodec(); + + @override + Bounded decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Legacy._decode(input); + case 1: + return Inline._decode(input); + case 2: + return 
Lookup._decode(input); + default: + throw Exception('Bounded: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Bounded value, _i1.Output output) { + switch (value.runtimeType) { + case Legacy: + (value as Legacy).encodeTo(output); + break; + case Inline: + (value as Inline).encodeTo(output); + break; + case Lookup: + (value as Lookup).encodeTo(output); + break; + default: + throw Exception('Bounded: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Bounded value) { + switch (value.runtimeType) { + case Legacy: + return (value as Legacy)._sizeHint(); + case Inline: + return (value as Inline)._sizeHint(); + case Lookup: + return (value as Lookup)._sizeHint(); + default: + throw Exception('Bounded: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Legacy extends Bounded { + const Legacy({required this.hash}); + + factory Legacy._decode(_i1.Input input) { + return Legacy(hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// H::Output + final _i3.H256 hash; + + @override + Map>> toJson() => { + 'Legacy': {'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Legacy && _i4.listsEqual(other.hash, hash); + + @override + int get hashCode => hash.hashCode; +} + +class Inline extends Bounded { + const Inline(this.value0); + + factory Inline._decode(_i1.Input input) { + return Inline(_i1.U8SequenceCodec.codec.decode(input)); + } + + /// BoundedInline + final List value0; + + @override + Map> toJson() => {'Inline': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8SequenceCodec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(1, output); + _i1.U8SequenceCodec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Inline && _i4.listsEqual(other.value0, value0); + + @override + int get hashCode => value0.hashCode; +} + +class Lookup extends Bounded { + const Lookup({required this.hash, required this.len}); + + factory Lookup._decode(_i1.Input input) { + return Lookup(hash: const _i1.U8ArrayCodec(32).decode(input), len: _i1.U32Codec.codec.decode(input)); + } + + /// H::Output + final _i3.H256 hash; + + /// u32 + final int len; + + @override + Map> toJson() => { + 'Lookup': {'hash': hash.toList(), 'len': len}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.H256Codec().sizeHint(hash); + size = size + _i1.U32Codec.codec.sizeHint(len); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + _i1.U32Codec.codec.encodeTo(len, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Lookup && _i4.listsEqual(other.hash, hash) && other.len == len; + + @override + int get hashCode => Object.hash(hash, len); +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/traits/schedule/dispatch_time.dart b/quantus_sdk/lib/generated/planck/types/frame_support/traits/schedule/dispatch_time.dart new file mode 100644 index 00000000..5a14fb3f --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/traits/schedule/dispatch_time.dart @@ -0,0 +1,145 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +abstract class DispatchTime { + const DispatchTime(); + + factory DispatchTime.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $DispatchTimeCodec codec = $DispatchTimeCodec(); + + static const 
$DispatchTime values = $DispatchTime(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $DispatchTime { + const $DispatchTime(); + + At at(int value0) { + return At(value0); + } + + After after(int value0) { + return After(value0); + } +} + +class $DispatchTimeCodec with _i1.Codec { + const $DispatchTimeCodec(); + + @override + DispatchTime decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return At._decode(input); + case 1: + return After._decode(input); + default: + throw Exception('DispatchTime: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(DispatchTime value, _i1.Output output) { + switch (value.runtimeType) { + case At: + (value as At).encodeTo(output); + break; + case After: + (value as After).encodeTo(output); + break; + default: + throw Exception('DispatchTime: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(DispatchTime value) { + switch (value.runtimeType) { + case At: + return (value as At)._sizeHint(); + case After: + return (value as After)._sizeHint(); + default: + throw Exception('DispatchTime: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class At extends DispatchTime { + const At(this.value0); + + factory At._decode(_i1.Input input) { + return At(_i1.U32Codec.codec.decode(input)); + } + + /// BlockNumber + final int value0; + + @override + Map toJson() => {'At': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U32Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is At && other.value0 == value0; 
+ + @override + int get hashCode => value0.hashCode; +} + +class After extends DispatchTime { + const After(this.value0); + + factory After._decode(_i1.Input input) { + return After(_i1.U32Codec.codec.decode(input)); + } + + /// BlockNumber + final int value0; + + @override + Map toJson() => {'After': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U32Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is After && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/traits/tokens/misc/balance_status.dart b/quantus_sdk/lib/generated/planck/types/frame_support/traits/tokens/misc/balance_status.dart new file mode 100644 index 00000000..5f830490 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/traits/tokens/misc/balance_status.dart @@ -0,0 +1,49 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum BalanceStatus { + free('Free', 0), + reserved('Reserved', 1); + + const BalanceStatus(this.variantName, this.codecIndex); + + factory BalanceStatus.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $BalanceStatusCodec codec = $BalanceStatusCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $BalanceStatusCodec with _i1.Codec { + const $BalanceStatusCodec(); + + @override + BalanceStatus decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return BalanceStatus.free; + case 1: + return BalanceStatus.reserved; + default: + throw 
Exception('BalanceStatus: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(BalanceStatus value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/traits/tokens/misc/id_amount_1.dart b/quantus_sdk/lib/generated/planck/types/frame_support/traits/tokens/misc/id_amount_1.dart new file mode 100644 index 00000000..852db11b --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/traits/tokens/misc/id_amount_1.dart @@ -0,0 +1,58 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../../../quantus_runtime/runtime_hold_reason.dart' as _i2; + +class IdAmount { + const IdAmount({required this.id, required this.amount}); + + factory IdAmount.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Id + final _i2.RuntimeHoldReason id; + + /// Balance + final BigInt amount; + + static const $IdAmountCodec codec = $IdAmountCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'id': id.toJson(), 'amount': amount}; + + @override + bool operator ==(Object other) => + identical(this, other) || other is IdAmount && other.id == id && other.amount == amount; + + @override + int get hashCode => Object.hash(id, amount); +} + +class $IdAmountCodec with _i1.Codec { + const $IdAmountCodec(); + + @override + void encodeTo(IdAmount obj, _i1.Output output) { + _i2.RuntimeHoldReason.codec.encodeTo(obj.id, output); + _i1.U128Codec.codec.encodeTo(obj.amount, output); + } + + @override + IdAmount decode(_i1.Input input) { + return IdAmount(id: _i2.RuntimeHoldReason.codec.decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + @override + int sizeHint(IdAmount obj) { + int size = 0; + size = size + _i2.RuntimeHoldReason.codec.sizeHint(obj.id); + size = size + 
_i1.U128Codec.codec.sizeHint(obj.amount); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_support/traits/tokens/misc/id_amount_2.dart b/quantus_sdk/lib/generated/planck/types/frame_support/traits/tokens/misc/id_amount_2.dart new file mode 100644 index 00000000..6ec0b06d --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_support/traits/tokens/misc/id_amount_2.dart @@ -0,0 +1,58 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../../../quantus_runtime/runtime_freeze_reason.dart' as _i2; + +class IdAmount { + const IdAmount({required this.id, required this.amount}); + + factory IdAmount.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Id + final _i2.RuntimeFreezeReason id; + + /// Balance + final BigInt amount; + + static const $IdAmountCodec codec = $IdAmountCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'id': null, 'amount': amount}; + + @override + bool operator ==(Object other) => + identical(this, other) || other is IdAmount && other.id == id && other.amount == amount; + + @override + int get hashCode => Object.hash(id, amount); +} + +class $IdAmountCodec with _i1.Codec { + const $IdAmountCodec(); + + @override + void encodeTo(IdAmount obj, _i1.Output output) { + _i1.NullCodec.codec.encodeTo(obj.id, output); + _i1.U128Codec.codec.encodeTo(obj.amount, output); + } + + @override + IdAmount decode(_i1.Input input) { + return IdAmount(id: _i1.NullCodec.codec.decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + @override + int sizeHint(IdAmount obj) { + int size = 0; + size = size + const _i2.RuntimeFreezeReasonCodec().sizeHint(obj.id); + size = size + _i1.U128Codec.codec.sizeHint(obj.amount); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/account_info.dart 
b/quantus_sdk/lib/generated/planck/types/frame_system/account_info.dart new file mode 100644 index 00000000..d35ddcf1 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/account_info.dart @@ -0,0 +1,97 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../pallet_balances/types/account_data.dart' as _i2; + +class AccountInfo { + const AccountInfo({ + required this.nonce, + required this.consumers, + required this.providers, + required this.sufficients, + required this.data, + }); + + factory AccountInfo.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Nonce + final int nonce; + + /// RefCount + final int consumers; + + /// RefCount + final int providers; + + /// RefCount + final int sufficients; + + /// AccountData + final _i2.AccountData data; + + static const $AccountInfoCodec codec = $AccountInfoCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'nonce': nonce, + 'consumers': consumers, + 'providers': providers, + 'sufficients': sufficients, + 'data': data.toJson(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is AccountInfo && + other.nonce == nonce && + other.consumers == consumers && + other.providers == providers && + other.sufficients == sufficients && + other.data == data; + + @override + int get hashCode => Object.hash(nonce, consumers, providers, sufficients, data); +} + +class $AccountInfoCodec with _i1.Codec { + const $AccountInfoCodec(); + + @override + void encodeTo(AccountInfo obj, _i1.Output output) { + _i1.U32Codec.codec.encodeTo(obj.nonce, output); + _i1.U32Codec.codec.encodeTo(obj.consumers, output); + _i1.U32Codec.codec.encodeTo(obj.providers, output); + _i1.U32Codec.codec.encodeTo(obj.sufficients, output); + _i2.AccountData.codec.encodeTo(obj.data, output); + } + + @override + AccountInfo 
decode(_i1.Input input) { + return AccountInfo( + nonce: _i1.U32Codec.codec.decode(input), + consumers: _i1.U32Codec.codec.decode(input), + providers: _i1.U32Codec.codec.decode(input), + sufficients: _i1.U32Codec.codec.decode(input), + data: _i2.AccountData.codec.decode(input), + ); + } + + @override + int sizeHint(AccountInfo obj) { + int size = 0; + size = size + _i1.U32Codec.codec.sizeHint(obj.nonce); + size = size + _i1.U32Codec.codec.sizeHint(obj.consumers); + size = size + _i1.U32Codec.codec.sizeHint(obj.providers); + size = size + _i1.U32Codec.codec.sizeHint(obj.sufficients); + size = size + _i2.AccountData.codec.sizeHint(obj.data); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/code_upgrade_authorization.dart b/quantus_sdk/lib/generated/planck/types/frame_system/code_upgrade_authorization.dart new file mode 100644 index 00000000..d89fc207 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/code_upgrade_authorization.dart @@ -0,0 +1,65 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../primitive_types/h256.dart' as _i2; + +class CodeUpgradeAuthorization { + const CodeUpgradeAuthorization({required this.codeHash, required this.checkVersion}); + + factory CodeUpgradeAuthorization.decode(_i1.Input input) { + return codec.decode(input); + } + + /// T::Hash + final _i2.H256 codeHash; + + /// bool + final bool checkVersion; + + static const $CodeUpgradeAuthorizationCodec codec = $CodeUpgradeAuthorizationCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'codeHash': codeHash.toList(), 'checkVersion': checkVersion}; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is CodeUpgradeAuthorization && + _i4.listsEqual(other.codeHash, codeHash) && + other.checkVersion == 
checkVersion; + + @override + int get hashCode => Object.hash(codeHash, checkVersion); +} + +class $CodeUpgradeAuthorizationCodec with _i1.Codec { + const $CodeUpgradeAuthorizationCodec(); + + @override + void encodeTo(CodeUpgradeAuthorization obj, _i1.Output output) { + const _i1.U8ArrayCodec(32).encodeTo(obj.codeHash, output); + _i1.BoolCodec.codec.encodeTo(obj.checkVersion, output); + } + + @override + CodeUpgradeAuthorization decode(_i1.Input input) { + return CodeUpgradeAuthorization( + codeHash: const _i1.U8ArrayCodec(32).decode(input), + checkVersion: _i1.BoolCodec.codec.decode(input), + ); + } + + @override + int sizeHint(CodeUpgradeAuthorization obj) { + int size = 0; + size = size + const _i2.H256Codec().sizeHint(obj.codeHash); + size = size + _i1.BoolCodec.codec.sizeHint(obj.checkVersion); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/dispatch_event_info.dart b/quantus_sdk/lib/generated/planck/types/frame_system/dispatch_event_info.dart new file mode 100644 index 00000000..764759fb --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/dispatch_event_info.dart @@ -0,0 +1,70 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i5; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../frame_support/dispatch/dispatch_class.dart' as _i3; +import '../frame_support/dispatch/pays.dart' as _i4; +import '../sp_weights/weight_v2/weight.dart' as _i2; + +class DispatchEventInfo { + const DispatchEventInfo({required this.weight, required this.class_, required this.paysFee}); + + factory DispatchEventInfo.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Weight + final _i2.Weight weight; + + /// DispatchClass + final _i3.DispatchClass class_; + + /// Pays + final _i4.Pays paysFee; + + static const $DispatchEventInfoCodec codec = $DispatchEventInfoCodec(); + + _i5.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() 
=> {'weight': weight.toJson(), 'class': class_.toJson(), 'paysFee': paysFee.toJson()}; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DispatchEventInfo && other.weight == weight && other.class_ == class_ && other.paysFee == paysFee; + + @override + int get hashCode => Object.hash(weight, class_, paysFee); +} + +class $DispatchEventInfoCodec with _i1.Codec { + const $DispatchEventInfoCodec(); + + @override + void encodeTo(DispatchEventInfo obj, _i1.Output output) { + _i2.Weight.codec.encodeTo(obj.weight, output); + _i3.DispatchClass.codec.encodeTo(obj.class_, output); + _i4.Pays.codec.encodeTo(obj.paysFee, output); + } + + @override + DispatchEventInfo decode(_i1.Input input) { + return DispatchEventInfo( + weight: _i2.Weight.codec.decode(input), + class_: _i3.DispatchClass.codec.decode(input), + paysFee: _i4.Pays.codec.decode(input), + ); + } + + @override + int sizeHint(DispatchEventInfo obj) { + int size = 0; + size = size + _i2.Weight.codec.sizeHint(obj.weight); + size = size + _i3.DispatchClass.codec.sizeHint(obj.class_); + size = size + _i4.Pays.codec.sizeHint(obj.paysFee); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/event_record.dart b/quantus_sdk/lib/generated/planck/types/frame_system/event_record.dart new file mode 100644 index 00000000..5987bf9b --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/event_record.dart @@ -0,0 +1,75 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i5; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i6; + +import '../primitive_types/h256.dart' as _i4; +import '../quantus_runtime/runtime_event.dart' as _i3; +import 'phase.dart' as _i2; + +class EventRecord { + const EventRecord({required this.phase, required this.event, required this.topics}); + + factory EventRecord.decode(_i1.Input input) { + return codec.decode(input); + } 
+ + /// Phase + final _i2.Phase phase; + + /// E + final _i3.RuntimeEvent event; + + /// Vec + final List<_i4.H256> topics; + + static const $EventRecordCodec codec = $EventRecordCodec(); + + _i5.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'phase': phase.toJson(), + 'event': event.toJson(), + 'topics': topics.map((value) => value.toList()).toList(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is EventRecord && other.phase == phase && other.event == event && _i6.listsEqual(other.topics, topics); + + @override + int get hashCode => Object.hash(phase, event, topics); +} + +class $EventRecordCodec with _i1.Codec { + const $EventRecordCodec(); + + @override + void encodeTo(EventRecord obj, _i1.Output output) { + _i2.Phase.codec.encodeTo(obj.phase, output); + _i3.RuntimeEvent.codec.encodeTo(obj.event, output); + const _i1.SequenceCodec<_i4.H256>(_i4.H256Codec()).encodeTo(obj.topics, output); + } + + @override + EventRecord decode(_i1.Input input) { + return EventRecord( + phase: _i2.Phase.codec.decode(input), + event: _i3.RuntimeEvent.codec.decode(input), + topics: const _i1.SequenceCodec<_i4.H256>(_i4.H256Codec()).decode(input), + ); + } + + @override + int sizeHint(EventRecord obj) { + int size = 0; + size = size + _i2.Phase.codec.sizeHint(obj.phase); + size = size + _i3.RuntimeEvent.codec.sizeHint(obj.event); + size = size + const _i1.SequenceCodec<_i4.H256>(_i4.H256Codec()).sizeHint(obj.topics); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_genesis/check_genesis.dart b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_genesis/check_genesis.dart new file mode 100644 index 00000000..8868c238 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_genesis/check_genesis.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 
'package:polkadart/scale_codec.dart' as _i1; + +typedef CheckGenesis = dynamic; + +class CheckGenesisCodec with _i1.Codec { + const CheckGenesisCodec(); + + @override + CheckGenesis decode(_i1.Input input) { + return _i1.NullCodec.codec.decode(input); + } + + @override + void encodeTo(CheckGenesis value, _i1.Output output) { + _i1.NullCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(CheckGenesis value) { + return _i1.NullCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_mortality/check_mortality.dart b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_mortality/check_mortality.dart new file mode 100644 index 00000000..06298fe3 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_mortality/check_mortality.dart @@ -0,0 +1,25 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i2; + +import '../../../sp_runtime/generic/era/era.dart' as _i1; + +typedef CheckMortality = _i1.Era; + +class CheckMortalityCodec with _i2.Codec { + const CheckMortalityCodec(); + + @override + CheckMortality decode(_i2.Input input) { + return _i1.Era.codec.decode(input); + } + + @override + void encodeTo(CheckMortality value, _i2.Output output) { + _i1.Era.codec.encodeTo(value, output); + } + + @override + int sizeHint(CheckMortality value) { + return _i1.Era.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_non_zero_sender/check_non_zero_sender.dart b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_non_zero_sender/check_non_zero_sender.dart new file mode 100644 index 00000000..f2cf2cad --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_non_zero_sender/check_non_zero_sender.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 
'package:polkadart/scale_codec.dart' as _i1; + +typedef CheckNonZeroSender = dynamic; + +class CheckNonZeroSenderCodec with _i1.Codec { + const CheckNonZeroSenderCodec(); + + @override + CheckNonZeroSender decode(_i1.Input input) { + return _i1.NullCodec.codec.decode(input); + } + + @override + void encodeTo(CheckNonZeroSender value, _i1.Output output) { + _i1.NullCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(CheckNonZeroSender value) { + return _i1.NullCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_nonce/check_nonce.dart b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_nonce/check_nonce.dart new file mode 100644 index 00000000..5e91c23f --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_nonce/check_nonce.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef CheckNonce = BigInt; + +class CheckNonceCodec with _i1.Codec { + const CheckNonceCodec(); + + @override + CheckNonce decode(_i1.Input input) { + return _i1.CompactBigIntCodec.codec.decode(input); + } + + @override + void encodeTo(CheckNonce value, _i1.Output output) { + _i1.CompactBigIntCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(CheckNonce value) { + return _i1.CompactBigIntCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_spec_version/check_spec_version.dart b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_spec_version/check_spec_version.dart new file mode 100644 index 00000000..8366f520 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_spec_version/check_spec_version.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + 
+typedef CheckSpecVersion = dynamic; + +class CheckSpecVersionCodec with _i1.Codec { + const CheckSpecVersionCodec(); + + @override + CheckSpecVersion decode(_i1.Input input) { + return _i1.NullCodec.codec.decode(input); + } + + @override + void encodeTo(CheckSpecVersion value, _i1.Output output) { + _i1.NullCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(CheckSpecVersion value) { + return _i1.NullCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_tx_version/check_tx_version.dart b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_tx_version/check_tx_version.dart new file mode 100644 index 00000000..68153e17 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_tx_version/check_tx_version.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef CheckTxVersion = dynamic; + +class CheckTxVersionCodec with _i1.Codec { + const CheckTxVersionCodec(); + + @override + CheckTxVersion decode(_i1.Input input) { + return _i1.NullCodec.codec.decode(input); + } + + @override + void encodeTo(CheckTxVersion value, _i1.Output output) { + _i1.NullCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(CheckTxVersion value) { + return _i1.NullCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_weight/check_weight.dart b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_weight/check_weight.dart new file mode 100644 index 00000000..8fec0eef --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/extensions/check_weight/check_weight.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef CheckWeight = dynamic; + +class CheckWeightCodec with 
_i1.Codec { + const CheckWeightCodec(); + + @override + CheckWeight decode(_i1.Input input) { + return _i1.NullCodec.codec.decode(input); + } + + @override + void encodeTo(CheckWeight value, _i1.Output output) { + _i1.NullCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(CheckWeight value) { + return _i1.NullCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/last_runtime_upgrade_info.dart b/quantus_sdk/lib/generated/planck/types/frame_system/last_runtime_upgrade_info.dart new file mode 100644 index 00000000..f36c690b --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/last_runtime_upgrade_info.dart @@ -0,0 +1,62 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../cow_1.dart' as _i2; + +class LastRuntimeUpgradeInfo { + const LastRuntimeUpgradeInfo({required this.specVersion, required this.specName}); + + factory LastRuntimeUpgradeInfo.decode(_i1.Input input) { + return codec.decode(input); + } + + /// codec::Compact + final BigInt specVersion; + + /// Cow<'static, str> + final _i2.Cow specName; + + static const $LastRuntimeUpgradeInfoCodec codec = $LastRuntimeUpgradeInfoCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'specVersion': specVersion, 'specName': specName}; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is LastRuntimeUpgradeInfo && other.specVersion == specVersion && other.specName == specName; + + @override + int get hashCode => Object.hash(specVersion, specName); +} + +class $LastRuntimeUpgradeInfoCodec with _i1.Codec { + const $LastRuntimeUpgradeInfoCodec(); + + @override + void encodeTo(LastRuntimeUpgradeInfo obj, _i1.Output output) { + _i1.CompactBigIntCodec.codec.encodeTo(obj.specVersion, output); + _i1.StrCodec.codec.encodeTo(obj.specName, output); + } + + 
@override + LastRuntimeUpgradeInfo decode(_i1.Input input) { + return LastRuntimeUpgradeInfo( + specVersion: _i1.CompactBigIntCodec.codec.decode(input), + specName: _i1.StrCodec.codec.decode(input), + ); + } + + @override + int sizeHint(LastRuntimeUpgradeInfo obj) { + int size = 0; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(obj.specVersion); + size = size + const _i2.CowCodec().sizeHint(obj.specName); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/limits/block_length.dart b/quantus_sdk/lib/generated/planck/types/frame_system/limits/block_length.dart new file mode 100644 index 00000000..8d610bb0 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/limits/block_length.dart @@ -0,0 +1,52 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../frame_support/dispatch/per_dispatch_class_3.dart' as _i2; + +class BlockLength { + const BlockLength({required this.max}); + + factory BlockLength.decode(_i1.Input input) { + return codec.decode(input); + } + + /// PerDispatchClass + final _i2.PerDispatchClass max; + + static const $BlockLengthCodec codec = $BlockLengthCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map> toJson() => {'max': max.toJson()}; + + @override + bool operator ==(Object other) => identical(this, other) || other is BlockLength && other.max == max; + + @override + int get hashCode => max.hashCode; +} + +class $BlockLengthCodec with _i1.Codec { + const $BlockLengthCodec(); + + @override + void encodeTo(BlockLength obj, _i1.Output output) { + _i2.PerDispatchClass.codec.encodeTo(obj.max, output); + } + + @override + BlockLength decode(_i1.Input input) { + return BlockLength(max: _i2.PerDispatchClass.codec.decode(input)); + } + + @override + int sizeHint(BlockLength obj) { + int size = 0; + size = size + _i2.PerDispatchClass.codec.sizeHint(obj.max); 
+ return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/limits/block_weights.dart b/quantus_sdk/lib/generated/planck/types/frame_system/limits/block_weights.dart new file mode 100644 index 00000000..45160d0b --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/limits/block_weights.dart @@ -0,0 +1,73 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i4; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../frame_support/dispatch/per_dispatch_class_2.dart' as _i3; +import '../../sp_weights/weight_v2/weight.dart' as _i2; + +class BlockWeights { + const BlockWeights({required this.baseBlock, required this.maxBlock, required this.perClass}); + + factory BlockWeights.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Weight + final _i2.Weight baseBlock; + + /// Weight + final _i2.Weight maxBlock; + + /// PerDispatchClass + final _i3.PerDispatchClass perClass; + + static const $BlockWeightsCodec codec = $BlockWeightsCodec(); + + _i4.Uint8List encode() { + return codec.encode(this); + } + + Map> toJson() => { + 'baseBlock': baseBlock.toJson(), + 'maxBlock': maxBlock.toJson(), + 'perClass': perClass.toJson(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is BlockWeights && other.baseBlock == baseBlock && other.maxBlock == maxBlock && other.perClass == perClass; + + @override + int get hashCode => Object.hash(baseBlock, maxBlock, perClass); +} + +class $BlockWeightsCodec with _i1.Codec { + const $BlockWeightsCodec(); + + @override + void encodeTo(BlockWeights obj, _i1.Output output) { + _i2.Weight.codec.encodeTo(obj.baseBlock, output); + _i2.Weight.codec.encodeTo(obj.maxBlock, output); + _i3.PerDispatchClass.codec.encodeTo(obj.perClass, output); + } + + @override + BlockWeights decode(_i1.Input input) { + return BlockWeights( + baseBlock: _i2.Weight.codec.decode(input), + maxBlock: 
_i2.Weight.codec.decode(input), + perClass: _i3.PerDispatchClass.codec.decode(input), + ); + } + + @override + int sizeHint(BlockWeights obj) { + int size = 0; + size = size + _i2.Weight.codec.sizeHint(obj.baseBlock); + size = size + _i2.Weight.codec.sizeHint(obj.maxBlock); + size = size + _i3.PerDispatchClass.codec.sizeHint(obj.perClass); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/limits/weights_per_class.dart b/quantus_sdk/lib/generated/planck/types/frame_system/limits/weights_per_class.dart new file mode 100644 index 00000000..d3728830 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/limits/weights_per_class.dart @@ -0,0 +1,83 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../sp_weights/weight_v2/weight.dart' as _i2; + +class WeightsPerClass { + const WeightsPerClass({required this.baseExtrinsic, this.maxExtrinsic, this.maxTotal, this.reserved}); + + factory WeightsPerClass.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Weight + final _i2.Weight baseExtrinsic; + + /// Option + final _i2.Weight? maxExtrinsic; + + /// Option + final _i2.Weight? maxTotal; + + /// Option + final _i2.Weight? 
reserved; + + static const $WeightsPerClassCodec codec = $WeightsPerClassCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map?> toJson() => { + 'baseExtrinsic': baseExtrinsic.toJson(), + 'maxExtrinsic': maxExtrinsic?.toJson(), + 'maxTotal': maxTotal?.toJson(), + 'reserved': reserved?.toJson(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is WeightsPerClass && + other.baseExtrinsic == baseExtrinsic && + other.maxExtrinsic == maxExtrinsic && + other.maxTotal == maxTotal && + other.reserved == reserved; + + @override + int get hashCode => Object.hash(baseExtrinsic, maxExtrinsic, maxTotal, reserved); +} + +class $WeightsPerClassCodec with _i1.Codec { + const $WeightsPerClassCodec(); + + @override + void encodeTo(WeightsPerClass obj, _i1.Output output) { + _i2.Weight.codec.encodeTo(obj.baseExtrinsic, output); + const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).encodeTo(obj.maxExtrinsic, output); + const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).encodeTo(obj.maxTotal, output); + const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).encodeTo(obj.reserved, output); + } + + @override + WeightsPerClass decode(_i1.Input input) { + return WeightsPerClass( + baseExtrinsic: _i2.Weight.codec.decode(input), + maxExtrinsic: const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).decode(input), + maxTotal: const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).decode(input), + reserved: const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).decode(input), + ); + } + + @override + int sizeHint(WeightsPerClass obj) { + int size = 0; + size = size + _i2.Weight.codec.sizeHint(obj.baseExtrinsic); + size = size + const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).sizeHint(obj.maxExtrinsic); + size = size + const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).sizeHint(obj.maxTotal); + size = size + const _i1.OptionCodec<_i2.Weight>(_i2.Weight.codec).sizeHint(obj.reserved); + return size; + } +} diff --git 
a/quantus_sdk/lib/generated/planck/types/frame_system/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/frame_system/pallet/call.dart new file mode 100644 index 00000000..d63a4a81 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/pallet/call.dart @@ -0,0 +1,610 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import '../../primitive_types/h256.dart' as _i4; +import '../../tuples.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. +abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + Remark remark({required List remark}) { + return Remark(remark: remark); + } + + SetHeapPages setHeapPages({required BigInt pages}) { + return SetHeapPages(pages: pages); + } + + SetCode setCode({required List code}) { + return SetCode(code: code); + } + + SetCodeWithoutChecks setCodeWithoutChecks({required List code}) { + return SetCodeWithoutChecks(code: code); + } + + SetStorage setStorage({required List<_i3.Tuple2, List>> items}) { + return SetStorage(items: items); + } + + KillStorage killStorage({required List> keys}) { + return KillStorage(keys: keys); + } + + KillPrefix killPrefix({required List prefix, required int subkeys}) { + return KillPrefix(prefix: prefix, subkeys: subkeys); + } + + RemarkWithEvent remarkWithEvent({required List remark}) { + return RemarkWithEvent(remark: remark); + } + + AuthorizeUpgrade authorizeUpgrade({required _i4.H256 
codeHash}) { + return AuthorizeUpgrade(codeHash: codeHash); + } + + AuthorizeUpgradeWithoutChecks authorizeUpgradeWithoutChecks({required _i4.H256 codeHash}) { + return AuthorizeUpgradeWithoutChecks(codeHash: codeHash); + } + + ApplyAuthorizedUpgrade applyAuthorizedUpgrade({required List code}) { + return ApplyAuthorizedUpgrade(code: code); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Remark._decode(input); + case 1: + return SetHeapPages._decode(input); + case 2: + return SetCode._decode(input); + case 3: + return SetCodeWithoutChecks._decode(input); + case 4: + return SetStorage._decode(input); + case 5: + return KillStorage._decode(input); + case 6: + return KillPrefix._decode(input); + case 7: + return RemarkWithEvent._decode(input); + case 9: + return AuthorizeUpgrade._decode(input); + case 10: + return AuthorizeUpgradeWithoutChecks._decode(input); + case 11: + return ApplyAuthorizedUpgrade._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case Remark: + (value as Remark).encodeTo(output); + break; + case SetHeapPages: + (value as SetHeapPages).encodeTo(output); + break; + case SetCode: + (value as SetCode).encodeTo(output); + break; + case SetCodeWithoutChecks: + (value as SetCodeWithoutChecks).encodeTo(output); + break; + case SetStorage: + (value as SetStorage).encodeTo(output); + break; + case KillStorage: + (value as KillStorage).encodeTo(output); + break; + case KillPrefix: + (value as KillPrefix).encodeTo(output); + break; + case RemarkWithEvent: + (value as RemarkWithEvent).encodeTo(output); + break; + case AuthorizeUpgrade: + (value as AuthorizeUpgrade).encodeTo(output); + break; + case AuthorizeUpgradeWithoutChecks: + (value as 
AuthorizeUpgradeWithoutChecks).encodeTo(output); + break; + case ApplyAuthorizedUpgrade: + (value as ApplyAuthorizedUpgrade).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case Remark: + return (value as Remark)._sizeHint(); + case SetHeapPages: + return (value as SetHeapPages)._sizeHint(); + case SetCode: + return (value as SetCode)._sizeHint(); + case SetCodeWithoutChecks: + return (value as SetCodeWithoutChecks)._sizeHint(); + case SetStorage: + return (value as SetStorage)._sizeHint(); + case KillStorage: + return (value as KillStorage)._sizeHint(); + case KillPrefix: + return (value as KillPrefix)._sizeHint(); + case RemarkWithEvent: + return (value as RemarkWithEvent)._sizeHint(); + case AuthorizeUpgrade: + return (value as AuthorizeUpgrade)._sizeHint(); + case AuthorizeUpgradeWithoutChecks: + return (value as AuthorizeUpgradeWithoutChecks)._sizeHint(); + case ApplyAuthorizedUpgrade: + return (value as ApplyAuthorizedUpgrade)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Make some on-chain remark. +/// +/// Can be executed by every `origin`. 
+class Remark extends Call { + const Remark({required this.remark}); + + factory Remark._decode(_i1.Input input) { + return Remark(remark: _i1.U8SequenceCodec.codec.decode(input)); + } + + /// Vec + final List remark; + + @override + Map>> toJson() => { + 'remark': {'remark': remark}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8SequenceCodec.codec.sizeHint(remark); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U8SequenceCodec.codec.encodeTo(remark, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Remark && _i5.listsEqual(other.remark, remark); + + @override + int get hashCode => remark.hashCode; +} + +/// Set the number of pages in the WebAssembly environment's heap. +class SetHeapPages extends Call { + const SetHeapPages({required this.pages}); + + factory SetHeapPages._decode(_i1.Input input) { + return SetHeapPages(pages: _i1.U64Codec.codec.decode(input)); + } + + /// u64 + final BigInt pages; + + @override + Map> toJson() => { + 'set_heap_pages': {'pages': pages}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U64Codec.codec.sizeHint(pages); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U64Codec.codec.encodeTo(pages, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is SetHeapPages && other.pages == pages; + + @override + int get hashCode => pages.hashCode; +} + +/// Set the new runtime code. 
+class SetCode extends Call { + const SetCode({required this.code}); + + factory SetCode._decode(_i1.Input input) { + return SetCode(code: _i1.U8SequenceCodec.codec.decode(input)); + } + + /// Vec + final List code; + + @override + Map>> toJson() => { + 'set_code': {'code': code}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8SequenceCodec.codec.sizeHint(code); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U8SequenceCodec.codec.encodeTo(code, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is SetCode && _i5.listsEqual(other.code, code); + + @override + int get hashCode => code.hashCode; +} + +/// Set the new runtime code without doing any checks of the given `code`. +/// +/// Note that runtime upgrades will not run if this is called with a not-increasing spec +/// version! +class SetCodeWithoutChecks extends Call { + const SetCodeWithoutChecks({required this.code}); + + factory SetCodeWithoutChecks._decode(_i1.Input input) { + return SetCodeWithoutChecks(code: _i1.U8SequenceCodec.codec.decode(input)); + } + + /// Vec + final List code; + + @override + Map>> toJson() => { + 'set_code_without_checks': {'code': code}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8SequenceCodec.codec.sizeHint(code); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i1.U8SequenceCodec.codec.encodeTo(code, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is SetCodeWithoutChecks && _i5.listsEqual(other.code, code); + + @override + int get hashCode => code.hashCode; +} + +/// Set some items of storage. 
+class SetStorage extends Call { + const SetStorage({required this.items}); + + factory SetStorage._decode(_i1.Input input) { + return SetStorage( + items: const _i1.SequenceCodec<_i3.Tuple2, List>>( + _i3.Tuple2Codec, List>(_i1.U8SequenceCodec.codec, _i1.U8SequenceCodec.codec), + ).decode(input), + ); + } + + /// Vec + final List<_i3.Tuple2, List>> items; + + @override + Map>>>> toJson() => { + 'set_storage': { + 'items': items.map((value) => [value.value0, value.value1]).toList(), + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i1.SequenceCodec<_i3.Tuple2, List>>( + _i3.Tuple2Codec, List>(_i1.U8SequenceCodec.codec, _i1.U8SequenceCodec.codec), + ).sizeHint(items); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.SequenceCodec<_i3.Tuple2, List>>( + _i3.Tuple2Codec, List>(_i1.U8SequenceCodec.codec, _i1.U8SequenceCodec.codec), + ).encodeTo(items, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is SetStorage && _i5.listsEqual(other.items, items); + + @override + int get hashCode => items.hashCode; +} + +/// Kill some items from storage. 
+class KillStorage extends Call { + const KillStorage({required this.keys}); + + factory KillStorage._decode(_i1.Input input) { + return KillStorage(keys: const _i1.SequenceCodec>(_i1.U8SequenceCodec.codec).decode(input)); + } + + /// Vec + final List> keys; + + @override + Map>>> toJson() => { + 'kill_storage': {'keys': keys.map((value) => value).toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.SequenceCodec>(_i1.U8SequenceCodec.codec).sizeHint(keys); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + const _i1.SequenceCodec>(_i1.U8SequenceCodec.codec).encodeTo(keys, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is KillStorage && _i5.listsEqual(other.keys, keys); + + @override + int get hashCode => keys.hashCode; +} + +/// Kill all storage items with a key that starts with the given prefix. +/// +/// **NOTE:** We rely on the Root origin to provide us the number of subkeys under +/// the prefix we are removing to accurately calculate the weight of this function. 
+class KillPrefix extends Call { + const KillPrefix({required this.prefix, required this.subkeys}); + + factory KillPrefix._decode(_i1.Input input) { + return KillPrefix(prefix: _i1.U8SequenceCodec.codec.decode(input), subkeys: _i1.U32Codec.codec.decode(input)); + } + + /// Key + final List prefix; + + /// u32 + final int subkeys; + + @override + Map> toJson() => { + 'kill_prefix': {'prefix': prefix, 'subkeys': subkeys}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8SequenceCodec.codec.sizeHint(prefix); + size = size + _i1.U32Codec.codec.sizeHint(subkeys); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i1.U8SequenceCodec.codec.encodeTo(prefix, output); + _i1.U32Codec.codec.encodeTo(subkeys, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is KillPrefix && _i5.listsEqual(other.prefix, prefix) && other.subkeys == subkeys; + + @override + int get hashCode => Object.hash(prefix, subkeys); +} + +/// Make some on-chain remark and emit event. +class RemarkWithEvent extends Call { + const RemarkWithEvent({required this.remark}); + + factory RemarkWithEvent._decode(_i1.Input input) { + return RemarkWithEvent(remark: _i1.U8SequenceCodec.codec.decode(input)); + } + + /// Vec + final List remark; + + @override + Map>> toJson() => { + 'remark_with_event': {'remark': remark}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8SequenceCodec.codec.sizeHint(remark); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i1.U8SequenceCodec.codec.encodeTo(remark, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is RemarkWithEvent && _i5.listsEqual(other.remark, remark); + + @override + int get hashCode => remark.hashCode; +} + +/// Authorize an upgrade to a given `code_hash` for the runtime. The runtime can be supplied +/// later. 
+/// +/// This call requires Root origin. +class AuthorizeUpgrade extends Call { + const AuthorizeUpgrade({required this.codeHash}); + + factory AuthorizeUpgrade._decode(_i1.Input input) { + return AuthorizeUpgrade(codeHash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::Hash + final _i4.H256 codeHash; + + @override + Map>> toJson() => { + 'authorize_upgrade': {'codeHash': codeHash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i4.H256Codec().sizeHint(codeHash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + const _i1.U8ArrayCodec(32).encodeTo(codeHash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is AuthorizeUpgrade && _i5.listsEqual(other.codeHash, codeHash); + + @override + int get hashCode => codeHash.hashCode; +} + +/// Authorize an upgrade to a given `code_hash` for the runtime. The runtime can be supplied +/// later. +/// +/// WARNING: This authorizes an upgrade that will take place without any safety checks, for +/// example that the spec name remains the same and that the version number increases. Not +/// recommended for normal use. Use `authorize_upgrade` instead. +/// +/// This call requires Root origin. 
+class AuthorizeUpgradeWithoutChecks extends Call { + const AuthorizeUpgradeWithoutChecks({required this.codeHash}); + + factory AuthorizeUpgradeWithoutChecks._decode(_i1.Input input) { + return AuthorizeUpgradeWithoutChecks(codeHash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::Hash + final _i4.H256 codeHash; + + @override + Map>> toJson() => { + 'authorize_upgrade_without_checks': {'codeHash': codeHash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i4.H256Codec().sizeHint(codeHash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(10, output); + const _i1.U8ArrayCodec(32).encodeTo(codeHash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is AuthorizeUpgradeWithoutChecks && _i5.listsEqual(other.codeHash, codeHash); + + @override + int get hashCode => codeHash.hashCode; +} + +/// Provide the preimage (runtime binary) `code` for an upgrade that has been authorized. +/// +/// If the authorization required a version check, this call will ensure the spec name +/// remains unchanged and that the spec version has increased. +/// +/// Depending on the runtime's `OnSetCode` configuration, this function may directly apply +/// the new `code` in the same block or attempt to schedule the upgrade. +/// +/// All origins are allowed. 
+class ApplyAuthorizedUpgrade extends Call { + const ApplyAuthorizedUpgrade({required this.code}); + + factory ApplyAuthorizedUpgrade._decode(_i1.Input input) { + return ApplyAuthorizedUpgrade(code: _i1.U8SequenceCodec.codec.decode(input)); + } + + /// Vec + final List code; + + @override + Map>> toJson() => { + 'apply_authorized_upgrade': {'code': code}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8SequenceCodec.codec.sizeHint(code); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(11, output); + _i1.U8SequenceCodec.codec.encodeTo(code, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is ApplyAuthorizedUpgrade && _i5.listsEqual(other.code, code); + + @override + int get hashCode => code.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/frame_system/pallet/error.dart new file mode 100644 index 00000000..09fee32c --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/pallet/error.dart @@ -0,0 +1,92 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// Error for the System pallet +enum Error { + /// The name of specification does not match between the current runtime + /// and the new runtime. + invalidSpecName('InvalidSpecName', 0), + + /// The specification version is not allowed to decrease between the current runtime + /// and the new runtime. + specVersionNeedsToIncrease('SpecVersionNeedsToIncrease', 1), + + /// Failed to extract the runtime version from the new runtime. + /// + /// Either calling `Core_version` or decoding `RuntimeVersion` failed. + failedToExtractRuntimeVersion('FailedToExtractRuntimeVersion', 2), + + /// Suicide called when the account has non-default composite data. 
+ nonDefaultComposite('NonDefaultComposite', 3), + + /// There is a non-zero reference count preventing the account from being purged. + nonZeroRefCount('NonZeroRefCount', 4), + + /// The origin filter prevent the call to be dispatched. + callFiltered('CallFiltered', 5), + + /// A multi-block migration is ongoing and prevents the current code from being replaced. + multiBlockMigrationsOngoing('MultiBlockMigrationsOngoing', 6), + + /// No upgrade authorized. + nothingAuthorized('NothingAuthorized', 7), + + /// The submitted code is not authorized. + unauthorized('Unauthorized', 8); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.invalidSpecName; + case 1: + return Error.specVersionNeedsToIncrease; + case 2: + return Error.failedToExtractRuntimeVersion; + case 3: + return Error.nonDefaultComposite; + case 4: + return Error.nonZeroRefCount; + case 5: + return Error.callFiltered; + case 6: + return Error.multiBlockMigrationsOngoing; + case 7: + return Error.nothingAuthorized; + case 8: + return Error.unauthorized; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/frame_system/pallet/event.dart new file mode 100644 index 00000000..fd6f224c --- /dev/null +++ 
b/quantus_sdk/lib/generated/planck/types/frame_system/pallet/event.dart @@ -0,0 +1,458 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i7; + +import '../../primitive_types/h256.dart' as _i6; +import '../../sp_core/crypto/account_id32.dart' as _i5; +import '../../sp_runtime/dispatch_error.dart' as _i4; +import '../dispatch_event_info.dart' as _i3; + +/// Event for the System pallet. +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $Event { + const $Event(); + + ExtrinsicSuccess extrinsicSuccess({required _i3.DispatchEventInfo dispatchInfo}) { + return ExtrinsicSuccess(dispatchInfo: dispatchInfo); + } + + ExtrinsicFailed extrinsicFailed({ + required _i4.DispatchError dispatchError, + required _i3.DispatchEventInfo dispatchInfo, + }) { + return ExtrinsicFailed(dispatchError: dispatchError, dispatchInfo: dispatchInfo); + } + + CodeUpdated codeUpdated() { + return CodeUpdated(); + } + + NewAccount newAccount({required _i5.AccountId32 account}) { + return NewAccount(account: account); + } + + KilledAccount killedAccount({required _i5.AccountId32 account}) { + return KilledAccount(account: account); + } + + Remarked remarked({required _i5.AccountId32 sender, required _i6.H256 hash}) { + return Remarked(sender: sender, hash: hash); + } + + UpgradeAuthorized upgradeAuthorized({required _i6.H256 codeHash, required bool checkVersion}) { + return UpgradeAuthorized(codeHash: codeHash, checkVersion: checkVersion); + } + + 
RejectedInvalidAuthorizedUpgrade rejectedInvalidAuthorizedUpgrade({ + required _i6.H256 codeHash, + required _i4.DispatchError error, + }) { + return RejectedInvalidAuthorizedUpgrade(codeHash: codeHash, error: error); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return ExtrinsicSuccess._decode(input); + case 1: + return ExtrinsicFailed._decode(input); + case 2: + return const CodeUpdated(); + case 3: + return NewAccount._decode(input); + case 4: + return KilledAccount._decode(input); + case 5: + return Remarked._decode(input); + case 6: + return UpgradeAuthorized._decode(input); + case 7: + return RejectedInvalidAuthorizedUpgrade._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case ExtrinsicSuccess: + (value as ExtrinsicSuccess).encodeTo(output); + break; + case ExtrinsicFailed: + (value as ExtrinsicFailed).encodeTo(output); + break; + case CodeUpdated: + (value as CodeUpdated).encodeTo(output); + break; + case NewAccount: + (value as NewAccount).encodeTo(output); + break; + case KilledAccount: + (value as KilledAccount).encodeTo(output); + break; + case Remarked: + (value as Remarked).encodeTo(output); + break; + case UpgradeAuthorized: + (value as UpgradeAuthorized).encodeTo(output); + break; + case RejectedInvalidAuthorizedUpgrade: + (value as RejectedInvalidAuthorizedUpgrade).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case ExtrinsicSuccess: + return (value as ExtrinsicSuccess)._sizeHint(); + case ExtrinsicFailed: + return (value as ExtrinsicFailed)._sizeHint(); + case CodeUpdated: + return 1; + case NewAccount: + 
return (value as NewAccount)._sizeHint(); + case KilledAccount: + return (value as KilledAccount)._sizeHint(); + case Remarked: + return (value as Remarked)._sizeHint(); + case UpgradeAuthorized: + return (value as UpgradeAuthorized)._sizeHint(); + case RejectedInvalidAuthorizedUpgrade: + return (value as RejectedInvalidAuthorizedUpgrade)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// An extrinsic completed successfully. +class ExtrinsicSuccess extends Event { + const ExtrinsicSuccess({required this.dispatchInfo}); + + factory ExtrinsicSuccess._decode(_i1.Input input) { + return ExtrinsicSuccess(dispatchInfo: _i3.DispatchEventInfo.codec.decode(input)); + } + + /// DispatchEventInfo + final _i3.DispatchEventInfo dispatchInfo; + + @override + Map>> toJson() => { + 'ExtrinsicSuccess': {'dispatchInfo': dispatchInfo.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.DispatchEventInfo.codec.sizeHint(dispatchInfo); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.DispatchEventInfo.codec.encodeTo(dispatchInfo, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is ExtrinsicSuccess && other.dispatchInfo == dispatchInfo; + + @override + int get hashCode => dispatchInfo.hashCode; +} + +/// An extrinsic failed. 
+class ExtrinsicFailed extends Event { + const ExtrinsicFailed({required this.dispatchError, required this.dispatchInfo}); + + factory ExtrinsicFailed._decode(_i1.Input input) { + return ExtrinsicFailed( + dispatchError: _i4.DispatchError.codec.decode(input), + dispatchInfo: _i3.DispatchEventInfo.codec.decode(input), + ); + } + + /// DispatchError + final _i4.DispatchError dispatchError; + + /// DispatchEventInfo + final _i3.DispatchEventInfo dispatchInfo; + + @override + Map>> toJson() => { + 'ExtrinsicFailed': {'dispatchError': dispatchError.toJson(), 'dispatchInfo': dispatchInfo.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i4.DispatchError.codec.sizeHint(dispatchError); + size = size + _i3.DispatchEventInfo.codec.sizeHint(dispatchInfo); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i4.DispatchError.codec.encodeTo(dispatchError, output); + _i3.DispatchEventInfo.codec.encodeTo(dispatchInfo, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ExtrinsicFailed && other.dispatchError == dispatchError && other.dispatchInfo == dispatchInfo; + + @override + int get hashCode => Object.hash(dispatchError, dispatchInfo); +} + +/// `:code` was updated. +class CodeUpdated extends Event { + const CodeUpdated(); + + @override + Map toJson() => {'CodeUpdated': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + } + + @override + bool operator ==(Object other) => other is CodeUpdated; + + @override + int get hashCode => runtimeType.hashCode; +} + +/// A new account was created. 
+class NewAccount extends Event { + const NewAccount({required this.account}); + + factory NewAccount._decode(_i1.Input input) { + return NewAccount(account: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i5.AccountId32 account; + + @override + Map>> toJson() => { + 'NewAccount': {'account': account.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i5.AccountId32Codec().sizeHint(account); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(account, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is NewAccount && _i7.listsEqual(other.account, account); + + @override + int get hashCode => account.hashCode; +} + +/// An account was reaped. +class KilledAccount extends Event { + const KilledAccount({required this.account}); + + factory KilledAccount._decode(_i1.Input input) { + return KilledAccount(account: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i5.AccountId32 account; + + @override + Map>> toJson() => { + 'KilledAccount': {'account': account.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i5.AccountId32Codec().sizeHint(account); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.U8ArrayCodec(32).encodeTo(account, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is KilledAccount && _i7.listsEqual(other.account, account); + + @override + int get hashCode => account.hashCode; +} + +/// On on-chain remark happened. 
+class Remarked extends Event { + const Remarked({required this.sender, required this.hash}); + + factory Remarked._decode(_i1.Input input) { + return Remarked(sender: const _i1.U8ArrayCodec(32).decode(input), hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i5.AccountId32 sender; + + /// T::Hash + final _i6.H256 hash; + + @override + Map>> toJson() => { + 'Remarked': {'sender': sender.toList(), 'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i5.AccountId32Codec().sizeHint(sender); + size = size + const _i6.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + const _i1.U8ArrayCodec(32).encodeTo(sender, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Remarked && _i7.listsEqual(other.sender, sender) && _i7.listsEqual(other.hash, hash); + + @override + int get hashCode => Object.hash(sender, hash); +} + +/// An upgrade was authorized. 
+class UpgradeAuthorized extends Event { + const UpgradeAuthorized({required this.codeHash, required this.checkVersion}); + + factory UpgradeAuthorized._decode(_i1.Input input) { + return UpgradeAuthorized( + codeHash: const _i1.U8ArrayCodec(32).decode(input), + checkVersion: _i1.BoolCodec.codec.decode(input), + ); + } + + /// T::Hash + final _i6.H256 codeHash; + + /// bool + final bool checkVersion; + + @override + Map> toJson() => { + 'UpgradeAuthorized': {'codeHash': codeHash.toList(), 'checkVersion': checkVersion}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i6.H256Codec().sizeHint(codeHash); + size = size + _i1.BoolCodec.codec.sizeHint(checkVersion); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + const _i1.U8ArrayCodec(32).encodeTo(codeHash, output); + _i1.BoolCodec.codec.encodeTo(checkVersion, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is UpgradeAuthorized && _i7.listsEqual(other.codeHash, codeHash) && other.checkVersion == checkVersion; + + @override + int get hashCode => Object.hash(codeHash, checkVersion); +} + +/// An invalid authorized upgrade was rejected while trying to apply it. 
+class RejectedInvalidAuthorizedUpgrade extends Event { + const RejectedInvalidAuthorizedUpgrade({required this.codeHash, required this.error}); + + factory RejectedInvalidAuthorizedUpgrade._decode(_i1.Input input) { + return RejectedInvalidAuthorizedUpgrade( + codeHash: const _i1.U8ArrayCodec(32).decode(input), + error: _i4.DispatchError.codec.decode(input), + ); + } + + /// T::Hash + final _i6.H256 codeHash; + + /// DispatchError + final _i4.DispatchError error; + + @override + Map> toJson() => { + 'RejectedInvalidAuthorizedUpgrade': {'codeHash': codeHash.toList(), 'error': error.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i6.H256Codec().sizeHint(codeHash); + size = size + _i4.DispatchError.codec.sizeHint(error); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + const _i1.U8ArrayCodec(32).encodeTo(codeHash, output); + _i4.DispatchError.codec.encodeTo(error, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is RejectedInvalidAuthorizedUpgrade && _i7.listsEqual(other.codeHash, codeHash) && other.error == error; + + @override + int get hashCode => Object.hash(codeHash, error); +} diff --git a/quantus_sdk/lib/generated/planck/types/frame_system/phase.dart b/quantus_sdk/lib/generated/planck/types/frame_system/phase.dart new file mode 100644 index 00000000..a888c990 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/frame_system/phase.dart @@ -0,0 +1,159 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +abstract class Phase { + const Phase(); + + factory Phase.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $PhaseCodec codec = $PhaseCodec(); + + static const $Phase values = $Phase(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, 
output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $Phase { + const $Phase(); + + ApplyExtrinsic applyExtrinsic(int value0) { + return ApplyExtrinsic(value0); + } + + Finalization finalization() { + return Finalization(); + } + + Initialization initialization() { + return Initialization(); + } +} + +class $PhaseCodec with _i1.Codec { + const $PhaseCodec(); + + @override + Phase decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return ApplyExtrinsic._decode(input); + case 1: + return const Finalization(); + case 2: + return const Initialization(); + default: + throw Exception('Phase: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Phase value, _i1.Output output) { + switch (value.runtimeType) { + case ApplyExtrinsic: + (value as ApplyExtrinsic).encodeTo(output); + break; + case Finalization: + (value as Finalization).encodeTo(output); + break; + case Initialization: + (value as Initialization).encodeTo(output); + break; + default: + throw Exception('Phase: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Phase value) { + switch (value.runtimeType) { + case ApplyExtrinsic: + return (value as ApplyExtrinsic)._sizeHint(); + case Finalization: + return 1; + case Initialization: + return 1; + default: + throw Exception('Phase: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class ApplyExtrinsic extends Phase { + const ApplyExtrinsic(this.value0); + + factory ApplyExtrinsic._decode(_i1.Input input) { + return ApplyExtrinsic(_i1.U32Codec.codec.decode(input)); + } + + /// u32 + final int value0; + + @override + Map toJson() => {'ApplyExtrinsic': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + 
_i1.U32Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ApplyExtrinsic && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Finalization extends Phase { + const Finalization(); + + @override + Map toJson() => {'Finalization': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + } + + @override + bool operator ==(Object other) => other is Finalization; + + @override + int get hashCode => runtimeType.hashCode; +} + +class Initialization extends Phase { + const Initialization(); + + @override + Map toJson() => {'Initialization': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + } + + @override + bool operator ==(Object other) => other is Initialization; + + @override + int get hashCode => runtimeType.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_assets/pallet/call.dart new file mode 100644 index 00000000..c5448eff --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_assets/pallet/call.dart @@ -0,0 +1,2438 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../sp_runtime/multiaddress/multi_address.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. 
+abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + Create create({required BigInt id, required _i3.MultiAddress admin, required BigInt minBalance}) { + return Create(id: id, admin: admin, minBalance: minBalance); + } + + ForceCreate forceCreate({ + required BigInt id, + required _i3.MultiAddress owner, + required bool isSufficient, + required BigInt minBalance, + }) { + return ForceCreate(id: id, owner: owner, isSufficient: isSufficient, minBalance: minBalance); + } + + StartDestroy startDestroy({required BigInt id}) { + return StartDestroy(id: id); + } + + DestroyAccounts destroyAccounts({required BigInt id}) { + return DestroyAccounts(id: id); + } + + DestroyApprovals destroyApprovals({required BigInt id}) { + return DestroyApprovals(id: id); + } + + FinishDestroy finishDestroy({required BigInt id}) { + return FinishDestroy(id: id); + } + + Mint mint({required BigInt id, required _i3.MultiAddress beneficiary, required BigInt amount}) { + return Mint(id: id, beneficiary: beneficiary, amount: amount); + } + + Burn burn({required BigInt id, required _i3.MultiAddress who, required BigInt amount}) { + return Burn(id: id, who: who, amount: amount); + } + + Transfer transfer({required BigInt id, required _i3.MultiAddress target, required BigInt amount}) { + return Transfer(id: id, target: target, amount: amount); + } + + TransferKeepAlive transferKeepAlive({required BigInt id, required _i3.MultiAddress target, required BigInt amount}) { + return TransferKeepAlive(id: id, target: target, amount: amount); + } + + ForceTransfer forceTransfer({ + required BigInt 
id, + required _i3.MultiAddress source, + required _i3.MultiAddress dest, + required BigInt amount, + }) { + return ForceTransfer(id: id, source: source, dest: dest, amount: amount); + } + + Freeze freeze({required BigInt id, required _i3.MultiAddress who}) { + return Freeze(id: id, who: who); + } + + Thaw thaw({required BigInt id, required _i3.MultiAddress who}) { + return Thaw(id: id, who: who); + } + + FreezeAsset freezeAsset({required BigInt id}) { + return FreezeAsset(id: id); + } + + ThawAsset thawAsset({required BigInt id}) { + return ThawAsset(id: id); + } + + TransferOwnership transferOwnership({required BigInt id, required _i3.MultiAddress owner}) { + return TransferOwnership(id: id, owner: owner); + } + + SetTeam setTeam({ + required BigInt id, + required _i3.MultiAddress issuer, + required _i3.MultiAddress admin, + required _i3.MultiAddress freezer, + }) { + return SetTeam(id: id, issuer: issuer, admin: admin, freezer: freezer); + } + + SetMetadata setMetadata({ + required BigInt id, + required List name, + required List symbol, + required int decimals, + }) { + return SetMetadata(id: id, name: name, symbol: symbol, decimals: decimals); + } + + ClearMetadata clearMetadata({required BigInt id}) { + return ClearMetadata(id: id); + } + + ForceSetMetadata forceSetMetadata({ + required BigInt id, + required List name, + required List symbol, + required int decimals, + required bool isFrozen, + }) { + return ForceSetMetadata(id: id, name: name, symbol: symbol, decimals: decimals, isFrozen: isFrozen); + } + + ForceClearMetadata forceClearMetadata({required BigInt id}) { + return ForceClearMetadata(id: id); + } + + ForceAssetStatus forceAssetStatus({ + required BigInt id, + required _i3.MultiAddress owner, + required _i3.MultiAddress issuer, + required _i3.MultiAddress admin, + required _i3.MultiAddress freezer, + required BigInt minBalance, + required bool isSufficient, + required bool isFrozen, + }) { + return ForceAssetStatus( + id: id, + owner: owner, + 
issuer: issuer, + admin: admin, + freezer: freezer, + minBalance: minBalance, + isSufficient: isSufficient, + isFrozen: isFrozen, + ); + } + + ApproveTransfer approveTransfer({required BigInt id, required _i3.MultiAddress delegate, required BigInt amount}) { + return ApproveTransfer(id: id, delegate: delegate, amount: amount); + } + + CancelApproval cancelApproval({required BigInt id, required _i3.MultiAddress delegate}) { + return CancelApproval(id: id, delegate: delegate); + } + + ForceCancelApproval forceCancelApproval({ + required BigInt id, + required _i3.MultiAddress owner, + required _i3.MultiAddress delegate, + }) { + return ForceCancelApproval(id: id, owner: owner, delegate: delegate); + } + + TransferApproved transferApproved({ + required BigInt id, + required _i3.MultiAddress owner, + required _i3.MultiAddress destination, + required BigInt amount, + }) { + return TransferApproved(id: id, owner: owner, destination: destination, amount: amount); + } + + Touch touch({required BigInt id}) { + return Touch(id: id); + } + + Refund refund({required BigInt id, required bool allowBurn}) { + return Refund(id: id, allowBurn: allowBurn); + } + + SetMinBalance setMinBalance({required BigInt id, required BigInt minBalance}) { + return SetMinBalance(id: id, minBalance: minBalance); + } + + TouchOther touchOther({required BigInt id, required _i3.MultiAddress who}) { + return TouchOther(id: id, who: who); + } + + RefundOther refundOther({required BigInt id, required _i3.MultiAddress who}) { + return RefundOther(id: id, who: who); + } + + Block block({required BigInt id, required _i3.MultiAddress who}) { + return Block(id: id, who: who); + } + + TransferAll transferAll({required BigInt id, required _i3.MultiAddress dest, required bool keepAlive}) { + return TransferAll(id: id, dest: dest, keepAlive: keepAlive); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); 
+ switch (index) { + case 0: + return Create._decode(input); + case 1: + return ForceCreate._decode(input); + case 2: + return StartDestroy._decode(input); + case 3: + return DestroyAccounts._decode(input); + case 4: + return DestroyApprovals._decode(input); + case 5: + return FinishDestroy._decode(input); + case 6: + return Mint._decode(input); + case 7: + return Burn._decode(input); + case 8: + return Transfer._decode(input); + case 9: + return TransferKeepAlive._decode(input); + case 10: + return ForceTransfer._decode(input); + case 11: + return Freeze._decode(input); + case 12: + return Thaw._decode(input); + case 13: + return FreezeAsset._decode(input); + case 14: + return ThawAsset._decode(input); + case 15: + return TransferOwnership._decode(input); + case 16: + return SetTeam._decode(input); + case 17: + return SetMetadata._decode(input); + case 18: + return ClearMetadata._decode(input); + case 19: + return ForceSetMetadata._decode(input); + case 20: + return ForceClearMetadata._decode(input); + case 21: + return ForceAssetStatus._decode(input); + case 22: + return ApproveTransfer._decode(input); + case 23: + return CancelApproval._decode(input); + case 24: + return ForceCancelApproval._decode(input); + case 25: + return TransferApproved._decode(input); + case 26: + return Touch._decode(input); + case 27: + return Refund._decode(input); + case 28: + return SetMinBalance._decode(input); + case 29: + return TouchOther._decode(input); + case 30: + return RefundOther._decode(input); + case 31: + return Block._decode(input); + case 32: + return TransferAll._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case Create: + (value as Create).encodeTo(output); + break; + case ForceCreate: + (value as ForceCreate).encodeTo(output); + break; + case StartDestroy: + (value as StartDestroy).encodeTo(output); + break; + case 
DestroyAccounts: + (value as DestroyAccounts).encodeTo(output); + break; + case DestroyApprovals: + (value as DestroyApprovals).encodeTo(output); + break; + case FinishDestroy: + (value as FinishDestroy).encodeTo(output); + break; + case Mint: + (value as Mint).encodeTo(output); + break; + case Burn: + (value as Burn).encodeTo(output); + break; + case Transfer: + (value as Transfer).encodeTo(output); + break; + case TransferKeepAlive: + (value as TransferKeepAlive).encodeTo(output); + break; + case ForceTransfer: + (value as ForceTransfer).encodeTo(output); + break; + case Freeze: + (value as Freeze).encodeTo(output); + break; + case Thaw: + (value as Thaw).encodeTo(output); + break; + case FreezeAsset: + (value as FreezeAsset).encodeTo(output); + break; + case ThawAsset: + (value as ThawAsset).encodeTo(output); + break; + case TransferOwnership: + (value as TransferOwnership).encodeTo(output); + break; + case SetTeam: + (value as SetTeam).encodeTo(output); + break; + case SetMetadata: + (value as SetMetadata).encodeTo(output); + break; + case ClearMetadata: + (value as ClearMetadata).encodeTo(output); + break; + case ForceSetMetadata: + (value as ForceSetMetadata).encodeTo(output); + break; + case ForceClearMetadata: + (value as ForceClearMetadata).encodeTo(output); + break; + case ForceAssetStatus: + (value as ForceAssetStatus).encodeTo(output); + break; + case ApproveTransfer: + (value as ApproveTransfer).encodeTo(output); + break; + case CancelApproval: + (value as CancelApproval).encodeTo(output); + break; + case ForceCancelApproval: + (value as ForceCancelApproval).encodeTo(output); + break; + case TransferApproved: + (value as TransferApproved).encodeTo(output); + break; + case Touch: + (value as Touch).encodeTo(output); + break; + case Refund: + (value as Refund).encodeTo(output); + break; + case SetMinBalance: + (value as SetMinBalance).encodeTo(output); + break; + case TouchOther: + (value as TouchOther).encodeTo(output); + break; + case RefundOther: + 
(value as RefundOther).encodeTo(output); + break; + case Block: + (value as Block).encodeTo(output); + break; + case TransferAll: + (value as TransferAll).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case Create: + return (value as Create)._sizeHint(); + case ForceCreate: + return (value as ForceCreate)._sizeHint(); + case StartDestroy: + return (value as StartDestroy)._sizeHint(); + case DestroyAccounts: + return (value as DestroyAccounts)._sizeHint(); + case DestroyApprovals: + return (value as DestroyApprovals)._sizeHint(); + case FinishDestroy: + return (value as FinishDestroy)._sizeHint(); + case Mint: + return (value as Mint)._sizeHint(); + case Burn: + return (value as Burn)._sizeHint(); + case Transfer: + return (value as Transfer)._sizeHint(); + case TransferKeepAlive: + return (value as TransferKeepAlive)._sizeHint(); + case ForceTransfer: + return (value as ForceTransfer)._sizeHint(); + case Freeze: + return (value as Freeze)._sizeHint(); + case Thaw: + return (value as Thaw)._sizeHint(); + case FreezeAsset: + return (value as FreezeAsset)._sizeHint(); + case ThawAsset: + return (value as ThawAsset)._sizeHint(); + case TransferOwnership: + return (value as TransferOwnership)._sizeHint(); + case SetTeam: + return (value as SetTeam)._sizeHint(); + case SetMetadata: + return (value as SetMetadata)._sizeHint(); + case ClearMetadata: + return (value as ClearMetadata)._sizeHint(); + case ForceSetMetadata: + return (value as ForceSetMetadata)._sizeHint(); + case ForceClearMetadata: + return (value as ForceClearMetadata)._sizeHint(); + case ForceAssetStatus: + return (value as ForceAssetStatus)._sizeHint(); + case ApproveTransfer: + return (value as ApproveTransfer)._sizeHint(); + case CancelApproval: + return (value as CancelApproval)._sizeHint(); + case ForceCancelApproval: + return (value as 
ForceCancelApproval)._sizeHint(); + case TransferApproved: + return (value as TransferApproved)._sizeHint(); + case Touch: + return (value as Touch)._sizeHint(); + case Refund: + return (value as Refund)._sizeHint(); + case SetMinBalance: + return (value as SetMinBalance)._sizeHint(); + case TouchOther: + return (value as TouchOther)._sizeHint(); + case RefundOther: + return (value as RefundOther)._sizeHint(); + case Block: + return (value as Block)._sizeHint(); + case TransferAll: + return (value as TransferAll)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Issue a new class of fungible assets from a public origin. +/// +/// This new asset class has no assets initially and its owner is the origin. +/// +/// The origin must conform to the configured `CreateOrigin` and have sufficient funds free. +/// +/// Funds of sender are reserved by `AssetDeposit`. +/// +/// Parameters: +/// - `id`: The identifier of the new asset. This must not be currently in use to identify +/// an existing asset. If [`NextAssetId`] is set, then this must be equal to it. +/// - `admin`: The admin of this class of assets. The admin is the initial address of each +/// member of the asset class's admin team. +/// - `min_balance`: The minimum balance of this new asset that any single account must +/// have. If an account's balance is reduced below this, then it collapses to zero. +/// +/// Emits `Created` event when successful. 
+/// +/// Weight: `O(1)` +class Create extends Call { + const Create({required this.id, required this.admin, required this.minBalance}); + + factory Create._decode(_i1.Input input) { + return Create( + id: _i1.CompactBigIntCodec.codec.decode(input), + admin: _i3.MultiAddress.codec.decode(input), + minBalance: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AssetIdParameter + final BigInt id; + + /// AccountIdLookupOf + final _i3.MultiAddress admin; + + /// T::Balance + final BigInt minBalance; + + @override + Map> toJson() => { + 'create': {'id': id, 'admin': admin.toJson(), 'minBalance': minBalance}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i3.MultiAddress.codec.sizeHint(admin); + size = size + _i1.U128Codec.codec.sizeHint(minBalance); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i3.MultiAddress.codec.encodeTo(admin, output); + _i1.U128Codec.codec.encodeTo(minBalance, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Create && other.id == id && other.admin == admin && other.minBalance == minBalance; + + @override + int get hashCode => Object.hash(id, admin, minBalance); +} + +/// Issue a new class of fungible assets from a privileged origin. +/// +/// This new asset class has no assets initially. +/// +/// The origin must conform to `ForceOrigin`. +/// +/// Unlike `create`, no funds are reserved. +/// +/// - `id`: The identifier of the new asset. This must not be currently in use to identify +/// an existing asset. If [`NextAssetId`] is set, then this must be equal to it. +/// - `owner`: The owner of this class of assets. The owner has full superuser permissions +/// over this asset, but may later change and configure the permissions using +/// `transfer_ownership` and `set_team`. 
+/// - `min_balance`: The minimum balance of this new asset that any single account must +/// have. If an account's balance is reduced below this, then it collapses to zero. +/// +/// Emits `ForceCreated` event when successful. +/// +/// Weight: `O(1)` +class ForceCreate extends Call { + const ForceCreate({required this.id, required this.owner, required this.isSufficient, required this.minBalance}); + + factory ForceCreate._decode(_i1.Input input) { + return ForceCreate( + id: _i1.CompactBigIntCodec.codec.decode(input), + owner: _i3.MultiAddress.codec.decode(input), + isSufficient: _i1.BoolCodec.codec.decode(input), + minBalance: _i1.CompactBigIntCodec.codec.decode(input), + ); + } + + /// T::AssetIdParameter + final BigInt id; + + /// AccountIdLookupOf + final _i3.MultiAddress owner; + + /// bool + final bool isSufficient; + + /// T::Balance + final BigInt minBalance; + + @override + Map> toJson() => { + 'force_create': {'id': id, 'owner': owner.toJson(), 'isSufficient': isSufficient, 'minBalance': minBalance}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i3.MultiAddress.codec.sizeHint(owner); + size = size + _i1.BoolCodec.codec.sizeHint(isSufficient); + size = size + _i1.CompactBigIntCodec.codec.sizeHint(minBalance); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i3.MultiAddress.codec.encodeTo(owner, output); + _i1.BoolCodec.codec.encodeTo(isSufficient, output); + _i1.CompactBigIntCodec.codec.encodeTo(minBalance, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ForceCreate && + other.id == id && + other.owner == owner && + other.isSufficient == isSufficient && + other.minBalance == minBalance; + + @override + int get hashCode => Object.hash(id, owner, isSufficient, minBalance); +} + +/// Start the process of destroying a fungible 
asset class. +/// +/// `start_destroy` is the first in a series of extrinsics that should be called, to allow +/// destruction of an asset class. +/// +/// The origin must conform to `ForceOrigin` or must be `Signed` by the asset's `owner`. +/// +/// - `id`: The identifier of the asset to be destroyed. This must identify an existing +/// asset. +/// +/// It will fail with either [`Error::ContainsHolds`] or [`Error::ContainsFreezes`] if +/// an account contains holds or freezes in place. +class StartDestroy extends Call { + const StartDestroy({required this.id}); + + factory StartDestroy._decode(_i1.Input input) { + return StartDestroy(id: _i1.CompactBigIntCodec.codec.decode(input)); + } + + /// T::AssetIdParameter + final BigInt id; + + @override + Map> toJson() => { + 'start_destroy': {'id': id}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is StartDestroy && other.id == id; + + @override + int get hashCode => id.hashCode; +} + +/// Destroy all accounts associated with a given asset. +/// +/// `destroy_accounts` should only be called after `start_destroy` has been called, and the +/// asset is in a `Destroying` state. +/// +/// Due to weight restrictions, this function may need to be called multiple times to fully +/// destroy all accounts. It will destroy `RemoveItemsLimit` accounts at a time. +/// +/// - `id`: The identifier of the asset to be destroyed. This must identify an existing +/// asset. +/// +/// Each call emits the `Event::DestroyedAccounts` event. 
+class DestroyAccounts extends Call { + const DestroyAccounts({required this.id}); + + factory DestroyAccounts._decode(_i1.Input input) { + return DestroyAccounts(id: _i1.CompactBigIntCodec.codec.decode(input)); + } + + /// T::AssetIdParameter + final BigInt id; + + @override + Map> toJson() => { + 'destroy_accounts': {'id': id}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is DestroyAccounts && other.id == id; + + @override + int get hashCode => id.hashCode; +} + +/// Destroy all approvals associated with a given asset up to the max (T::RemoveItemsLimit). +/// +/// `destroy_approvals` should only be called after `start_destroy` has been called, and the +/// asset is in a `Destroying` state. +/// +/// Due to weight restrictions, this function may need to be called multiple times to fully +/// destroy all approvals. It will destroy `RemoveItemsLimit` approvals at a time. +/// +/// - `id`: The identifier of the asset to be destroyed. This must identify an existing +/// asset. +/// +/// Each call emits the `Event::DestroyedApprovals` event. 
+class DestroyApprovals extends Call { + const DestroyApprovals({required this.id}); + + factory DestroyApprovals._decode(_i1.Input input) { + return DestroyApprovals(id: _i1.CompactBigIntCodec.codec.decode(input)); + } + + /// T::AssetIdParameter + final BigInt id; + + @override + Map> toJson() => { + 'destroy_approvals': {'id': id}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is DestroyApprovals && other.id == id; + + @override + int get hashCode => id.hashCode; +} + +/// Complete destroying asset and unreserve currency. +/// +/// `finish_destroy` should only be called after `start_destroy` has been called, and the +/// asset is in a `Destroying` state. All accounts or approvals should be destroyed before +/// hand. +/// +/// - `id`: The identifier of the asset to be destroyed. This must identify an existing +/// asset. +/// +/// Each successful call emits the `Event::Destroyed` event. +class FinishDestroy extends Call { + const FinishDestroy({required this.id}); + + factory FinishDestroy._decode(_i1.Input input) { + return FinishDestroy(id: _i1.CompactBigIntCodec.codec.decode(input)); + } + + /// T::AssetIdParameter + final BigInt id; + + @override + Map> toJson() => { + 'finish_destroy': {'id': id}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is FinishDestroy && other.id == id; + + @override + int get hashCode => id.hashCode; +} + +/// Mint assets of a particular class. 
///
/// The origin must be Signed and the sender must be the Issuer of the asset `id`.
///
/// - `id`: The identifier of the asset to have some amount minted.
/// - `beneficiary`: The account to be credited with the minted assets.
/// - `amount`: The amount of the asset to be minted.
///
/// Emits `Issued` event when successful.
///
/// Weight: `O(1)`
/// Modes: Pre-existing balance of `beneficiary`; Account pre-existence of `beneficiary`.
class Mint extends Call {
  const Mint({required this.id, required this.beneficiary, required this.amount});

  factory Mint._decode(_i1.Input input) {
    return Mint(
      id: _i1.CompactBigIntCodec.codec.decode(input),
      beneficiary: _i3.MultiAddress.codec.decode(input),
      amount: _i1.CompactBigIntCodec.codec.decode(input),
    );
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress beneficiary;

  /// T::Balance
  final BigInt amount;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'mint': {'id': id, 'beneficiary': beneficiary.toJson(), 'amount': amount},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i3.MultiAddress.codec.sizeHint(beneficiary);
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(amount);
    return size;
  }

  // Call index 6 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(6, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i3.MultiAddress.codec.encodeTo(beneficiary, output);
    _i1.CompactBigIntCodec.codec.encodeTo(amount, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is Mint && other.id == id && other.beneficiary == beneficiary && other.amount == amount;

  @override
  int get hashCode => Object.hash(id, beneficiary, amount);
}

/// Reduce the balance of `who` by as much as possible up to `amount` assets of `id`.
///
/// Origin must be Signed and the sender should be the Manager of the asset `id`.
///
/// Bails with `NoAccount` if the `who` is already dead.
///
/// - `id`: The identifier of the asset to have some amount burned.
/// - `who`: The account to be debited from.
/// - `amount`: The maximum amount by which `who`'s balance should be reduced.
///
/// Emits `Burned` with the actual amount burned. If this takes the balance to below the
/// minimum for the asset, then the amount burned is increased to take it to zero.
///
/// Weight: `O(1)`
/// Modes: Post-existence of `who`; Pre & post Zombie-status of `who`.
class Burn extends Call {
  const Burn({required this.id, required this.who, required this.amount});

  factory Burn._decode(_i1.Input input) {
    return Burn(
      id: _i1.CompactBigIntCodec.codec.decode(input),
      who: _i3.MultiAddress.codec.decode(input),
      amount: _i1.CompactBigIntCodec.codec.decode(input),
    );
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress who;

  /// T::Balance
  final BigInt amount;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'burn': {'id': id, 'who': who.toJson(), 'amount': amount},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i3.MultiAddress.codec.sizeHint(who);
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(amount);
    return size;
  }

  // Call index 7 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(7, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i3.MultiAddress.codec.encodeTo(who, output);
    _i1.CompactBigIntCodec.codec.encodeTo(amount, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is Burn && other.id == id && other.who == who && other.amount == amount;

  @override
  int get hashCode => Object.hash(id, who, amount);
}

/// Move some assets from the sender account to another.
///
/// Origin must be Signed.
///
/// - `id`: The identifier of the asset to have some amount transferred.
/// - `target`: The account to be credited.
/// - `amount`: The amount by which the sender's balance of assets should be reduced and
/// `target`'s balance increased. The amount actually transferred may be slightly greater in
/// the case that the transfer would otherwise take the sender balance above zero but below
/// the minimum balance. Must be greater than zero.
///
/// Emits `Transferred` with the actual amount transferred. If this takes the source balance
/// to below the minimum for the asset, then the amount transferred is increased to take it
/// to zero.
///
/// Weight: `O(1)`
/// Modes: Pre-existence of `target`; Post-existence of sender; Account pre-existence of
/// `target`.
class Transfer extends Call {
  const Transfer({required this.id, required this.target, required this.amount});

  factory Transfer._decode(_i1.Input input) {
    return Transfer(
      id: _i1.CompactBigIntCodec.codec.decode(input),
      target: _i3.MultiAddress.codec.decode(input),
      amount: _i1.CompactBigIntCodec.codec.decode(input),
    );
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress target;

  /// T::Balance
  final BigInt amount;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'transfer': {'id': id, 'target': target.toJson(), 'amount': amount},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i3.MultiAddress.codec.sizeHint(target);
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(amount);
    return size;
  }

  // Call index 8 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(8, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i3.MultiAddress.codec.encodeTo(target, output);
    _i1.CompactBigIntCodec.codec.encodeTo(amount, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is Transfer && other.id == id && other.target == target && other.amount == amount;

  @override
  int get hashCode => Object.hash(id, target, amount);
}

/// Move some assets from the sender account to another, keeping the sender account alive.
///
/// Origin must be Signed.
///
/// - `id`: The identifier of the asset to have some amount transferred.
/// - `target`: The account to be credited.
/// - `amount`: The amount by which the sender's balance of assets should be reduced and
/// `target`'s balance increased. The amount actually transferred may be slightly greater in
/// the case that the transfer would otherwise take the sender balance above zero but below
/// the minimum balance. Must be greater than zero.
///
/// Emits `Transferred` with the actual amount transferred. If this takes the source balance
/// to below the minimum for the asset, then the amount transferred is increased to take it
/// to zero.
///
/// Weight: `O(1)`
/// Modes: Pre-existence of `target`; Post-existence of sender; Account pre-existence of
/// `target`.
class TransferKeepAlive extends Call {
  const TransferKeepAlive({required this.id, required this.target, required this.amount});

  factory TransferKeepAlive._decode(_i1.Input input) {
    return TransferKeepAlive(
      id: _i1.CompactBigIntCodec.codec.decode(input),
      target: _i3.MultiAddress.codec.decode(input),
      amount: _i1.CompactBigIntCodec.codec.decode(input),
    );
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress target;

  /// T::Balance
  final BigInt amount;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'transfer_keep_alive': {'id': id, 'target': target.toJson(), 'amount': amount},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i3.MultiAddress.codec.sizeHint(target);
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(amount);
    return size;
  }

  // Call index 9 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(9, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i3.MultiAddress.codec.encodeTo(target, output);
    _i1.CompactBigIntCodec.codec.encodeTo(amount, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is TransferKeepAlive && other.id == id && other.target == target && other.amount == amount;

  @override
  int get hashCode => Object.hash(id, target, amount);
}

/// Move some assets from one account to another.
///
/// Origin must be Signed and the sender should be the Admin of the asset `id`.
///
/// - `id`: The identifier of the asset to have some amount transferred.
/// - `source`: The account to be debited.
/// - `dest`: The account to be credited.
/// - `amount`: The amount by which the `source`'s balance of assets should be reduced and
/// `dest`'s balance increased. The amount actually transferred may be slightly greater in
/// the case that the transfer would otherwise take the `source` balance above zero but
/// below the minimum balance. Must be greater than zero.
///
/// Emits `Transferred` with the actual amount transferred. If this takes the source balance
/// to below the minimum for the asset, then the amount transferred is increased to take it
/// to zero.
///
/// Weight: `O(1)`
/// Modes: Pre-existence of `dest`; Post-existence of `source`; Account pre-existence of
/// `dest`.
class ForceTransfer extends Call {
  const ForceTransfer({required this.id, required this.source, required this.dest, required this.amount});

  factory ForceTransfer._decode(_i1.Input input) {
    return ForceTransfer(
      id: _i1.CompactBigIntCodec.codec.decode(input),
      source: _i3.MultiAddress.codec.decode(input),
      dest: _i3.MultiAddress.codec.decode(input),
      amount: _i1.CompactBigIntCodec.codec.decode(input),
    );
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress source;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress dest;

  /// T::Balance
  final BigInt amount;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'force_transfer': {'id': id, 'source': source.toJson(), 'dest': dest.toJson(), 'amount': amount},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i3.MultiAddress.codec.sizeHint(source);
    size = size + _i3.MultiAddress.codec.sizeHint(dest);
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(amount);
    return size;
  }

  // Call index 10 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(10, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i3.MultiAddress.codec.encodeTo(source, output);
    _i3.MultiAddress.codec.encodeTo(dest, output);
    _i1.CompactBigIntCodec.codec.encodeTo(amount, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is ForceTransfer &&
          other.id == id &&
          other.source == source &&
          other.dest == dest &&
          other.amount == amount;

  @override
  int get hashCode => Object.hash(id, source, dest, amount);
}

/// Disallow further unprivileged transfers of an asset `id` from an account `who`. `who`
/// must already exist as an entry in `Account`s of the asset. If you want to freeze an
/// account that does not have an entry, use `touch_other` first.
///
/// Origin must be Signed and the sender should be the Freezer of the asset `id`.
///
/// - `id`: The identifier of the asset to be frozen.
/// - `who`: The account to be frozen.
///
/// Emits `Frozen`.
///
/// Weight: `O(1)`
class Freeze extends Call {
  const Freeze({required this.id, required this.who});

  factory Freeze._decode(_i1.Input input) {
    return Freeze(id: _i1.CompactBigIntCodec.codec.decode(input), who: _i3.MultiAddress.codec.decode(input));
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress who;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'freeze': {'id': id, 'who': who.toJson()},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i3.MultiAddress.codec.sizeHint(who);
    return size;
  }

  // Call index 11 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(11, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i3.MultiAddress.codec.encodeTo(who, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) || other is Freeze && other.id == id && other.who == who;

  @override
  int get hashCode => Object.hash(id, who);
}

/// Allow unprivileged transfers to and from an account again.
///
/// Origin must be Signed and the sender should be the Admin of the asset `id`.
///
/// - `id`: The identifier of the asset to be frozen.
/// - `who`: The account to be unfrozen.
///
/// Emits `Thawed`.
///
/// Weight: `O(1)`
class Thaw extends Call {
  const Thaw({required this.id, required this.who});

  factory Thaw._decode(_i1.Input input) {
    return Thaw(id: _i1.CompactBigIntCodec.codec.decode(input), who: _i3.MultiAddress.codec.decode(input));
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress who;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'thaw': {'id': id, 'who': who.toJson()},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i3.MultiAddress.codec.sizeHint(who);
    return size;
  }

  // Call index 12 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(12, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i3.MultiAddress.codec.encodeTo(who, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) || other is Thaw && other.id == id && other.who == who;

  @override
  int get hashCode => Object.hash(id, who);
}

/// Disallow further unprivileged transfers for the asset class.
///
/// Origin must be Signed and the sender should be the Freezer of the asset `id`.
///
/// - `id`: The identifier of the asset to be frozen.
///
/// Emits `Frozen`.
///
/// Weight: `O(1)`
class FreezeAsset extends Call {
  const FreezeAsset({required this.id});

  factory FreezeAsset._decode(_i1.Input input) {
    return FreezeAsset(id: _i1.CompactBigIntCodec.codec.decode(input));
  }

  /// T::AssetIdParameter
  final BigInt id;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'freeze_asset': {'id': id},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    return size;
  }

  // Call index 13 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(13, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) || other is FreezeAsset && other.id == id;

  @override
  int get hashCode => id.hashCode;
}

/// Allow unprivileged transfers for the asset again.
///
/// Origin must be Signed and the sender should be the Admin of the asset `id`.
///
/// - `id`: The identifier of the asset to be thawed.
///
/// Emits `Thawed`.
///
/// Weight: `O(1)`
class ThawAsset extends Call {
  const ThawAsset({required this.id});

  factory ThawAsset._decode(_i1.Input input) {
    return ThawAsset(id: _i1.CompactBigIntCodec.codec.decode(input));
  }

  /// T::AssetIdParameter
  final BigInt id;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'thaw_asset': {'id': id},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    return size;
  }

  // Call index 14 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(14, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) || other is ThawAsset && other.id == id;

  @override
  int get hashCode => id.hashCode;
}

/// Change the Owner of an asset.
///
/// Origin must be Signed and the sender should be the Owner of the asset `id`.
///
/// - `id`: The identifier of the asset.
/// - `owner`: The new Owner of this asset.
///
/// Emits `OwnerChanged`.
///
/// Weight: `O(1)`
class TransferOwnership extends Call {
  const TransferOwnership({required this.id, required this.owner});

  factory TransferOwnership._decode(_i1.Input input) {
    return TransferOwnership(
      id: _i1.CompactBigIntCodec.codec.decode(input),
      owner: _i3.MultiAddress.codec.decode(input),
    );
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress owner;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'transfer_ownership': {'id': id, 'owner': owner.toJson()},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i3.MultiAddress.codec.sizeHint(owner);
    return size;
  }

  // Call index 15 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(15, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i3.MultiAddress.codec.encodeTo(owner, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) || other is TransferOwnership && other.id == id && other.owner == owner;

  @override
  int get hashCode => Object.hash(id, owner);
}

/// Change the Issuer, Admin and Freezer of an asset.
///
/// Origin must be Signed and the sender should be the Owner of the asset `id`.
///
/// - `id`: The identifier of the asset to be frozen.
/// - `issuer`: The new Issuer of this asset.
/// - `admin`: The new Admin of this asset.
/// - `freezer`: The new Freezer of this asset.
///
/// Emits `TeamChanged`.
///
/// Weight: `O(1)`
class SetTeam extends Call {
  const SetTeam({required this.id, required this.issuer, required this.admin, required this.freezer});

  factory SetTeam._decode(_i1.Input input) {
    return SetTeam(
      id: _i1.CompactBigIntCodec.codec.decode(input),
      issuer: _i3.MultiAddress.codec.decode(input),
      admin: _i3.MultiAddress.codec.decode(input),
      freezer: _i3.MultiAddress.codec.decode(input),
    );
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress issuer;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress admin;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress freezer;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'set_team': {'id': id, 'issuer': issuer.toJson(), 'admin': admin.toJson(), 'freezer': freezer.toJson()},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i3.MultiAddress.codec.sizeHint(issuer);
    size = size + _i3.MultiAddress.codec.sizeHint(admin);
    size = size + _i3.MultiAddress.codec.sizeHint(freezer);
    return size;
  }

  // Call index 16 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(16, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i3.MultiAddress.codec.encodeTo(issuer, output);
    _i3.MultiAddress.codec.encodeTo(admin, output);
    _i3.MultiAddress.codec.encodeTo(freezer, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is SetTeam &&
          other.id == id &&
          other.issuer == issuer &&
          other.admin == admin &&
          other.freezer == freezer;

  @override
  int get hashCode => Object.hash(id, issuer, admin, freezer);
}

/// Set the metadata for an asset.
///
/// Origin must be Signed and the sender should be the Owner of the asset `id`.
///
/// Funds of sender are reserved according to the formula:
/// `MetadataDepositBase + MetadataDepositPerByte * (name.len + symbol.len)` taking into
/// account any already reserved funds.
///
/// - `id`: The identifier of the asset to update.
/// - `name`: The user friendly name of this asset. Limited in length by `StringLimit`.
/// - `symbol`: The exchange symbol for this asset. Limited in length by `StringLimit`.
/// - `decimals`: The number of decimals this asset uses to represent one unit.
///
/// Emits `MetadataSet`.
///
/// Weight: `O(1)`
class SetMetadata extends Call {
  const SetMetadata({required this.id, required this.name, required this.symbol, required this.decimals});

  factory SetMetadata._decode(_i1.Input input) {
    return SetMetadata(
      id: _i1.CompactBigIntCodec.codec.decode(input),
      name: _i1.U8SequenceCodec.codec.decode(input),
      symbol: _i1.U8SequenceCodec.codec.decode(input),
      decimals: _i1.U8Codec.codec.decode(input),
    );
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// Vec<u8>
  final List<int> name;

  /// Vec<u8>
  final List<int> symbol;

  /// u8
  final int decimals;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'set_metadata': {'id': id, 'name': name, 'symbol': symbol, 'decimals': decimals},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i1.U8SequenceCodec.codec.sizeHint(name);
    size = size + _i1.U8SequenceCodec.codec.sizeHint(symbol);
    size = size + _i1.U8Codec.codec.sizeHint(decimals);
    return size;
  }

  // Call index 17 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(17, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i1.U8SequenceCodec.codec.encodeTo(name, output);
    _i1.U8SequenceCodec.codec.encodeTo(symbol, output);
    _i1.U8Codec.codec.encodeTo(decimals, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is SetMetadata &&
          other.id == id &&
          _i4.listsEqual(other.name, name) &&
          _i4.listsEqual(other.symbol, symbol) &&
          other.decimals == decimals;

  @override
  int get hashCode => Object.hash(id, name, symbol, decimals);
}

/// Clear the metadata for an asset.
///
/// Origin must be Signed and the sender should be the Owner of the asset `id`.
///
/// Any deposit is freed for the asset owner.
///
/// - `id`: The identifier of the asset to clear.
///
/// Emits `MetadataCleared`.
///
/// Weight: `O(1)`
class ClearMetadata extends Call {
  const ClearMetadata({required this.id});

  factory ClearMetadata._decode(_i1.Input input) {
    return ClearMetadata(id: _i1.CompactBigIntCodec.codec.decode(input));
  }

  /// T::AssetIdParameter
  final BigInt id;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'clear_metadata': {'id': id},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    return size;
  }

  // Call index 18 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(18, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) || other is ClearMetadata && other.id == id;

  @override
  int get hashCode => id.hashCode;
}

/// Force the metadata for an asset to some value.
///
/// Origin must be ForceOrigin.
///
/// Any deposit is left alone.
///
/// - `id`: The identifier of the asset to update.
/// - `name`: The user friendly name of this asset. Limited in length by `StringLimit`.
/// - `symbol`: The exchange symbol for this asset. Limited in length by `StringLimit`.
/// - `decimals`: The number of decimals this asset uses to represent one unit.
///
/// Emits `MetadataSet`.
///
/// Weight: `O(N + S)` where N and S are the length of the name and symbol respectively.
class ForceSetMetadata extends Call {
  const ForceSetMetadata({
    required this.id,
    required this.name,
    required this.symbol,
    required this.decimals,
    required this.isFrozen,
  });

  factory ForceSetMetadata._decode(_i1.Input input) {
    return ForceSetMetadata(
      id: _i1.CompactBigIntCodec.codec.decode(input),
      name: _i1.U8SequenceCodec.codec.decode(input),
      symbol: _i1.U8SequenceCodec.codec.decode(input),
      decimals: _i1.U8Codec.codec.decode(input),
      isFrozen: _i1.BoolCodec.codec.decode(input),
    );
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// Vec<u8>
  final List<int> name;

  /// Vec<u8>
  final List<int> symbol;

  /// u8
  final int decimals;

  /// bool
  final bool isFrozen;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'force_set_metadata': {'id': id, 'name': name, 'symbol': symbol, 'decimals': decimals, 'isFrozen': isFrozen},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i1.U8SequenceCodec.codec.sizeHint(name);
    size = size + _i1.U8SequenceCodec.codec.sizeHint(symbol);
    size = size + _i1.U8Codec.codec.sizeHint(decimals);
    size = size + _i1.BoolCodec.codec.sizeHint(isFrozen);
    return size;
  }

  // Call index 19 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(19, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i1.U8SequenceCodec.codec.encodeTo(name, output);
    _i1.U8SequenceCodec.codec.encodeTo(symbol, output);
    _i1.U8Codec.codec.encodeTo(decimals, output);
    _i1.BoolCodec.codec.encodeTo(isFrozen, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is ForceSetMetadata &&
          other.id == id &&
          _i4.listsEqual(other.name, name) &&
          _i4.listsEqual(other.symbol, symbol) &&
          other.decimals == decimals &&
          other.isFrozen == isFrozen;

  @override
  int get hashCode => Object.hash(id, name, symbol, decimals, isFrozen);
}

/// Clear the metadata for an asset.
///
/// Origin must be ForceOrigin.
///
/// Any deposit is returned.
///
/// - `id`: The identifier of the asset to clear.
///
/// Emits `MetadataCleared`.
///
/// Weight: `O(1)`
class ForceClearMetadata extends Call {
  const ForceClearMetadata({required this.id});

  factory ForceClearMetadata._decode(_i1.Input input) {
    return ForceClearMetadata(id: _i1.CompactBigIntCodec.codec.decode(input));
  }

  /// T::AssetIdParameter
  final BigInt id;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'force_clear_metadata': {'id': id},
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    return size;
  }

  // Call index 20 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(20, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) || other is ForceClearMetadata && other.id == id;

  @override
  int get hashCode => id.hashCode;
}

/// Alter the attributes of a given asset.
///
/// Origin must be `ForceOrigin`.
///
/// - `id`: The identifier of the asset.
/// - `owner`: The new Owner of this asset.
/// - `issuer`: The new Issuer of this asset.
/// - `admin`: The new Admin of this asset.
/// - `freezer`: The new Freezer of this asset.
/// - `min_balance`: The minimum balance of this new asset that any single account must
/// have. If an account's balance is reduced below this, then it collapses to zero.
/// - `is_sufficient`: Whether a non-zero balance of this asset is deposit of sufficient
/// value to account for the state bloat associated with its balance storage. If set to
/// `true`, then non-zero balances may be stored without a `consumer` reference (and thus
/// an ED in the Balances pallet or whatever else is used to control user-account state
/// growth).
/// - `is_frozen`: Whether this asset class is frozen except for permissioned/admin
/// instructions.
///
/// Emits `AssetStatusChanged` with the identity of the asset.
///
/// Weight: `O(1)`
class ForceAssetStatus extends Call {
  const ForceAssetStatus({
    required this.id,
    required this.owner,
    required this.issuer,
    required this.admin,
    required this.freezer,
    required this.minBalance,
    required this.isSufficient,
    required this.isFrozen,
  });

  factory ForceAssetStatus._decode(_i1.Input input) {
    return ForceAssetStatus(
      id: _i1.CompactBigIntCodec.codec.decode(input),
      owner: _i3.MultiAddress.codec.decode(input),
      issuer: _i3.MultiAddress.codec.decode(input),
      admin: _i3.MultiAddress.codec.decode(input),
      freezer: _i3.MultiAddress.codec.decode(input),
      minBalance: _i1.CompactBigIntCodec.codec.decode(input),
      isSufficient: _i1.BoolCodec.codec.decode(input),
      isFrozen: _i1.BoolCodec.codec.decode(input),
    );
  }

  /// T::AssetIdParameter
  final BigInt id;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress owner;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress issuer;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress admin;

  /// AccountIdLookupOf<T>
  final _i3.MultiAddress freezer;

  /// T::Balance
  final BigInt minBalance;

  /// bool
  final bool isSufficient;

  /// bool
  final bool isFrozen;

  @override
  Map<String, Map<String, dynamic>> toJson() => {
        'force_asset_status': {
          'id': id,
          'owner': owner.toJson(),
          'issuer': issuer.toJson(),
          'admin': admin.toJson(),
          'freezer': freezer.toJson(),
          'minBalance': minBalance,
          'isSufficient': isSufficient,
          'isFrozen': isFrozen,
        },
      };

  int _sizeHint() {
    int size = 1;
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(id);
    size = size + _i3.MultiAddress.codec.sizeHint(owner);
    size = size + _i3.MultiAddress.codec.sizeHint(issuer);
    size = size + _i3.MultiAddress.codec.sizeHint(admin);
    size = size + _i3.MultiAddress.codec.sizeHint(freezer);
    size = size + _i1.CompactBigIntCodec.codec.sizeHint(minBalance);
    size = size + _i1.BoolCodec.codec.sizeHint(isSufficient);
    size = size + _i1.BoolCodec.codec.sizeHint(isFrozen);
    return size;
  }

  // Call index 21 within the pallet.
  void encodeTo(_i1.Output output) {
    _i1.U8Codec.codec.encodeTo(21, output);
    _i1.CompactBigIntCodec.codec.encodeTo(id, output);
    _i3.MultiAddress.codec.encodeTo(owner, output);
    _i3.MultiAddress.codec.encodeTo(issuer, output);
    _i3.MultiAddress.codec.encodeTo(admin, output);
    _i3.MultiAddress.codec.encodeTo(freezer, output);
    _i1.CompactBigIntCodec.codec.encodeTo(minBalance, output);
    _i1.BoolCodec.codec.encodeTo(isSufficient, output);
    _i1.BoolCodec.codec.encodeTo(isFrozen, output);
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is ForceAssetStatus &&
          other.id == id &&
          other.owner == owner &&
          other.issuer == issuer &&
          other.admin == admin &&
          other.freezer == freezer &&
          other.minBalance == minBalance &&
          other.isSufficient == isSufficient &&
          other.isFrozen == isFrozen;

  @override
  int get hashCode => Object.hash(id, owner, issuer, admin, freezer, minBalance, isSufficient, isFrozen);
}

/// Approve an amount of asset for transfer by a delegated third-party account.
///
/// Origin must be Signed.
///
/// Ensures that `ApprovalDeposit` worth of `Currency` is reserved from signing account
/// for the purpose of holding the approval. If some non-zero amount of assets is already
/// approved from signing account to `delegate`, then it is topped up or unreserved to
/// meet the right value.
///
/// NOTE: The signing account does not need to own `amount` of assets at the point of
/// making this call.
///
/// - `id`: The identifier of the asset.
/// - `delegate`: The account to delegate permission to transfer asset.
/// - `amount`: The amount of asset that may be transferred by `delegate`. If there is
/// already an approval in place, then this acts additively.
///
/// Emits `ApprovedTransfer` on success.
+/// +/// Weight: `O(1)` +class ApproveTransfer extends Call { + const ApproveTransfer({required this.id, required this.delegate, required this.amount}); + + factory ApproveTransfer._decode(_i1.Input input) { + return ApproveTransfer( + id: _i1.CompactBigIntCodec.codec.decode(input), + delegate: _i3.MultiAddress.codec.decode(input), + amount: _i1.CompactBigIntCodec.codec.decode(input), + ); + } + + /// T::AssetIdParameter + final BigInt id; + + /// AccountIdLookupOf + final _i3.MultiAddress delegate; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'approve_transfer': {'id': id, 'delegate': delegate.toJson(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i3.MultiAddress.codec.sizeHint(delegate); + size = size + _i1.CompactBigIntCodec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(22, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i3.MultiAddress.codec.encodeTo(delegate, output); + _i1.CompactBigIntCodec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ApproveTransfer && other.id == id && other.delegate == delegate && other.amount == amount; + + @override + int get hashCode => Object.hash(id, delegate, amount); +} + +/// Cancel all of some asset approved for delegated transfer by a third-party account. +/// +/// Origin must be Signed and there must be an approval in place between signer and +/// `delegate`. +/// +/// Unreserves any deposit previously reserved by `approve_transfer` for the approval. +/// +/// - `id`: The identifier of the asset. +/// - `delegate`: The account delegated permission to transfer asset. +/// +/// Emits `ApprovalCancelled` on success. 
+/// +/// Weight: `O(1)` +class CancelApproval extends Call { + const CancelApproval({required this.id, required this.delegate}); + + factory CancelApproval._decode(_i1.Input input) { + return CancelApproval( + id: _i1.CompactBigIntCodec.codec.decode(input), + delegate: _i3.MultiAddress.codec.decode(input), + ); + } + + /// T::AssetIdParameter + final BigInt id; + + /// AccountIdLookupOf + final _i3.MultiAddress delegate; + + @override + Map> toJson() => { + 'cancel_approval': {'id': id, 'delegate': delegate.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i3.MultiAddress.codec.sizeHint(delegate); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(23, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i3.MultiAddress.codec.encodeTo(delegate, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is CancelApproval && other.id == id && other.delegate == delegate; + + @override + int get hashCode => Object.hash(id, delegate); +} + +/// Cancel all of some asset approved for delegated transfer by a third-party account. +/// +/// Origin must be either ForceOrigin or Signed origin with the signer being the Admin +/// account of the asset `id`. +/// +/// Unreserves any deposit previously reserved by `approve_transfer` for the approval. +/// +/// - `id`: The identifier of the asset. +/// - `delegate`: The account delegated permission to transfer asset. +/// +/// Emits `ApprovalCancelled` on success. 
+/// +/// Weight: `O(1)` +class ForceCancelApproval extends Call { + const ForceCancelApproval({required this.id, required this.owner, required this.delegate}); + + factory ForceCancelApproval._decode(_i1.Input input) { + return ForceCancelApproval( + id: _i1.CompactBigIntCodec.codec.decode(input), + owner: _i3.MultiAddress.codec.decode(input), + delegate: _i3.MultiAddress.codec.decode(input), + ); + } + + /// T::AssetIdParameter + final BigInt id; + + /// AccountIdLookupOf + final _i3.MultiAddress owner; + + /// AccountIdLookupOf + final _i3.MultiAddress delegate; + + @override + Map> toJson() => { + 'force_cancel_approval': {'id': id, 'owner': owner.toJson(), 'delegate': delegate.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i3.MultiAddress.codec.sizeHint(owner); + size = size + _i3.MultiAddress.codec.sizeHint(delegate); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(24, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i3.MultiAddress.codec.encodeTo(owner, output); + _i3.MultiAddress.codec.encodeTo(delegate, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ForceCancelApproval && other.id == id && other.owner == owner && other.delegate == delegate; + + @override + int get hashCode => Object.hash(id, owner, delegate); +} + +/// Transfer some asset balance from a previously delegated account to some third-party +/// account. +/// +/// Origin must be Signed and there must be an approval in place by the `owner` to the +/// signer. +/// +/// If the entire amount approved for transfer is transferred, then any deposit previously +/// reserved by `approve_transfer` is unreserved. +/// +/// - `id`: The identifier of the asset. +/// - `owner`: The account which previously approved for a transfer of at least `amount` and +/// from which the asset balance will be withdrawn. 
+/// - `destination`: The account to which the asset balance of `amount` will be transferred. +/// - `amount`: The amount of assets to transfer. +/// +/// Emits `TransferredApproved` on success. +/// +/// Weight: `O(1)` +class TransferApproved extends Call { + const TransferApproved({required this.id, required this.owner, required this.destination, required this.amount}); + + factory TransferApproved._decode(_i1.Input input) { + return TransferApproved( + id: _i1.CompactBigIntCodec.codec.decode(input), + owner: _i3.MultiAddress.codec.decode(input), + destination: _i3.MultiAddress.codec.decode(input), + amount: _i1.CompactBigIntCodec.codec.decode(input), + ); + } + + /// T::AssetIdParameter + final BigInt id; + + /// AccountIdLookupOf + final _i3.MultiAddress owner; + + /// AccountIdLookupOf + final _i3.MultiAddress destination; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'transfer_approved': {'id': id, 'owner': owner.toJson(), 'destination': destination.toJson(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i3.MultiAddress.codec.sizeHint(owner); + size = size + _i3.MultiAddress.codec.sizeHint(destination); + size = size + _i1.CompactBigIntCodec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(25, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i3.MultiAddress.codec.encodeTo(owner, output); + _i3.MultiAddress.codec.encodeTo(destination, output); + _i1.CompactBigIntCodec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is TransferApproved && + other.id == id && + other.owner == owner && + other.destination == destination && + other.amount == amount; + + @override + int get hashCode => Object.hash(id, owner, destination, amount); +} + +/// Create an asset account for non-provider assets. 
+/// +/// A deposit will be taken from the signer account. +/// +/// - `origin`: Must be Signed; the signer account must have sufficient funds for a deposit +/// to be taken. +/// - `id`: The identifier of the asset for the account to be created. +/// +/// Emits `Touched` event when successful. +class Touch extends Call { + const Touch({required this.id}); + + factory Touch._decode(_i1.Input input) { + return Touch(id: _i1.CompactBigIntCodec.codec.decode(input)); + } + + /// T::AssetIdParameter + final BigInt id; + + @override + Map> toJson() => { + 'touch': {'id': id}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(26, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Touch && other.id == id; + + @override + int get hashCode => id.hashCode; +} + +/// Return the deposit (if any) of an asset account or a consumer reference (if any) of an +/// account. +/// +/// The origin must be Signed. +/// +/// - `id`: The identifier of the asset for which the caller would like the deposit +/// refunded. +/// - `allow_burn`: If `true` then assets may be destroyed in order to complete the refund. +/// +/// It will fail with either [`Error::ContainsHolds`] or [`Error::ContainsFreezes`] if +/// the asset account contains holds or freezes in place. +/// +/// Emits `Refunded` event when successful. 
+class Refund extends Call { + const Refund({required this.id, required this.allowBurn}); + + factory Refund._decode(_i1.Input input) { + return Refund(id: _i1.CompactBigIntCodec.codec.decode(input), allowBurn: _i1.BoolCodec.codec.decode(input)); + } + + /// T::AssetIdParameter + final BigInt id; + + /// bool + final bool allowBurn; + + @override + Map> toJson() => { + 'refund': {'id': id, 'allowBurn': allowBurn}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i1.BoolCodec.codec.sizeHint(allowBurn); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(27, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i1.BoolCodec.codec.encodeTo(allowBurn, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Refund && other.id == id && other.allowBurn == allowBurn; + + @override + int get hashCode => Object.hash(id, allowBurn); +} + +/// Sets the minimum balance of an asset. +/// +/// Only works if there aren't any accounts that are holding the asset or if +/// the new value of `min_balance` is less than the old one. +/// +/// Origin must be Signed and the sender has to be the Owner of the +/// asset `id`. +/// +/// - `id`: The identifier of the asset. +/// - `min_balance`: The new value of `min_balance`. +/// +/// Emits `AssetMinBalanceChanged` event when successful. 
+class SetMinBalance extends Call { + const SetMinBalance({required this.id, required this.minBalance}); + + factory SetMinBalance._decode(_i1.Input input) { + return SetMinBalance(id: _i1.CompactBigIntCodec.codec.decode(input), minBalance: _i1.U128Codec.codec.decode(input)); + } + + /// T::AssetIdParameter + final BigInt id; + + /// T::Balance + final BigInt minBalance; + + @override + Map> toJson() => { + 'set_min_balance': {'id': id, 'minBalance': minBalance}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i1.U128Codec.codec.sizeHint(minBalance); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(28, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i1.U128Codec.codec.encodeTo(minBalance, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is SetMinBalance && other.id == id && other.minBalance == minBalance; + + @override + int get hashCode => Object.hash(id, minBalance); +} + +/// Create an asset account for `who`. +/// +/// A deposit will be taken from the signer account. +/// +/// - `origin`: Must be Signed by `Freezer` or `Admin` of the asset `id`; the signer account +/// must have sufficient funds for a deposit to be taken. +/// - `id`: The identifier of the asset for the account to be created. +/// - `who`: The account to be created. +/// +/// Emits `Touched` event when successful. 
+class TouchOther extends Call { + const TouchOther({required this.id, required this.who}); + + factory TouchOther._decode(_i1.Input input) { + return TouchOther(id: _i1.CompactBigIntCodec.codec.decode(input), who: _i3.MultiAddress.codec.decode(input)); + } + + /// T::AssetIdParameter + final BigInt id; + + /// AccountIdLookupOf + final _i3.MultiAddress who; + + @override + Map> toJson() => { + 'touch_other': {'id': id, 'who': who.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i3.MultiAddress.codec.sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(29, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i3.MultiAddress.codec.encodeTo(who, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is TouchOther && other.id == id && other.who == who; + + @override + int get hashCode => Object.hash(id, who); +} + +/// Return the deposit (if any) of a target asset account. Useful if you are the depositor. +/// +/// The origin must be Signed and either the account owner, depositor, or asset `Admin`. In +/// order to burn a non-zero balance of the asset, the caller must be the account and should +/// use `refund`. +/// +/// - `id`: The identifier of the asset for the account holding a deposit. +/// - `who`: The account to refund. +/// +/// It will fail with either [`Error::ContainsHolds`] or [`Error::ContainsFreezes`] if +/// the asset account contains holds or freezes in place. +/// +/// Emits `Refunded` event when successful. 
+class RefundOther extends Call { + const RefundOther({required this.id, required this.who}); + + factory RefundOther._decode(_i1.Input input) { + return RefundOther(id: _i1.CompactBigIntCodec.codec.decode(input), who: _i3.MultiAddress.codec.decode(input)); + } + + /// T::AssetIdParameter + final BigInt id; + + /// AccountIdLookupOf + final _i3.MultiAddress who; + + @override + Map> toJson() => { + 'refund_other': {'id': id, 'who': who.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i3.MultiAddress.codec.sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(30, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i3.MultiAddress.codec.encodeTo(who, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is RefundOther && other.id == id && other.who == who; + + @override + int get hashCode => Object.hash(id, who); +} + +/// Disallow further unprivileged transfers of an asset `id` to and from an account `who`. +/// +/// Origin must be Signed and the sender should be the Freezer of the asset `id`. +/// +/// - `id`: The identifier of the account's asset. +/// - `who`: The account to be blocked. +/// +/// Emits `Blocked`.
+/// +/// Weight: `O(1)` +class Block extends Call { + const Block({required this.id, required this.who}); + + factory Block._decode(_i1.Input input) { + return Block(id: _i1.CompactBigIntCodec.codec.decode(input), who: _i3.MultiAddress.codec.decode(input)); + } + + /// T::AssetIdParameter + final BigInt id; + + /// AccountIdLookupOf + final _i3.MultiAddress who; + + @override + Map> toJson() => { + 'block': {'id': id, 'who': who.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i3.MultiAddress.codec.sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(31, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i3.MultiAddress.codec.encodeTo(who, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Block && other.id == id && other.who == who; + + @override + int get hashCode => Object.hash(id, who); +} + +/// Transfer the entire transferable balance from the caller asset account. +/// +/// NOTE: This function only attempts to transfer _transferable_ balances. This means that +/// any held, frozen, or minimum balance (when `keep_alive` is `true`), will not be +/// transferred by this function. To ensure that this function results in a killed account, +/// you might need to prepare the account by removing any reference counters, storage +/// deposits, etc... +/// +/// The dispatch origin of this call must be Signed. +/// +/// - `id`: The identifier of the asset for the account holding a deposit. +/// - `dest`: The recipient of the transfer. +/// - `keep_alive`: A boolean to determine if the `transfer_all` operation should send all +/// of the funds the asset account has, causing the sender asset account to be killed +/// (false), or transfer everything except at least the minimum balance, which will +/// guarantee to keep the sender asset account alive (true). 
+class TransferAll extends Call { + const TransferAll({required this.id, required this.dest, required this.keepAlive}); + + factory TransferAll._decode(_i1.Input input) { + return TransferAll( + id: _i1.CompactBigIntCodec.codec.decode(input), + dest: _i3.MultiAddress.codec.decode(input), + keepAlive: _i1.BoolCodec.codec.decode(input), + ); + } + + /// T::AssetIdParameter + final BigInt id; + + /// AccountIdLookupOf + final _i3.MultiAddress dest; + + /// bool + final bool keepAlive; + + @override + Map> toJson() => { + 'transfer_all': {'id': id, 'dest': dest.toJson(), 'keepAlive': keepAlive}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(id); + size = size + _i3.MultiAddress.codec.sizeHint(dest); + size = size + _i1.BoolCodec.codec.sizeHint(keepAlive); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(32, output); + _i1.CompactBigIntCodec.codec.encodeTo(id, output); + _i3.MultiAddress.codec.encodeTo(dest, output); + _i1.BoolCodec.codec.encodeTo(keepAlive, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is TransferAll && other.id == id && other.dest == dest && other.keepAlive == keepAlive; + + @override + int get hashCode => Object.hash(id, dest, keepAlive); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_assets/pallet/error.dart new file mode 100644 index 00000000..85ddf92c --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_assets/pallet/error.dart @@ -0,0 +1,161 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + /// Account balance must be greater than or equal to the transfer amount. + balanceLow('BalanceLow', 0), + + /// The account to alter does not exist. 
+ noAccount('NoAccount', 1), + + /// The signing account has no permission to do the operation. + noPermission('NoPermission', 2), + + /// The given asset ID is unknown. + unknown('Unknown', 3), + + /// The origin account is frozen. + frozen('Frozen', 4), + + /// The asset ID is already taken. + inUse('InUse', 5), + + /// Invalid witness data given. + badWitness('BadWitness', 6), + + /// Minimum balance should be non-zero. + minBalanceZero('MinBalanceZero', 7), + + /// Unable to increment the consumer reference counters on the account. Either no provider + /// reference exists to allow a non-zero balance of a non-self-sufficient asset, or one + /// fewer than the maximum number of consumers has been reached. + unavailableConsumer('UnavailableConsumer', 8), + + /// Invalid metadata given. + badMetadata('BadMetadata', 9), + + /// No approval exists that would allow the transfer. + unapproved('Unapproved', 10), + + /// The source account would not survive the transfer and it needs to stay alive. + wouldDie('WouldDie', 11), + + /// The asset-account already exists. + alreadyExists('AlreadyExists', 12), + + /// The asset-account doesn't have an associated deposit. + noDeposit('NoDeposit', 13), + + /// The operation would result in funds being burned. + wouldBurn('WouldBurn', 14), + + /// The asset is a live asset and is actively being used. Usually emit for operations such + /// as `start_destroy` which require the asset to be in a destroying state. + liveAsset('LiveAsset', 15), + + /// The asset is not live, and likely being destroyed. + assetNotLive('AssetNotLive', 16), + + /// The asset status is not the expected status. + incorrectStatus('IncorrectStatus', 17), + + /// The asset should be frozen before the given operation. + notFrozen('NotFrozen', 18), + + /// Callback action resulted in error + callbackFailed('CallbackFailed', 19), + + /// The asset ID must be equal to the [`NextAssetId`].
+ badAssetId('BadAssetId', 20), + + /// The asset cannot be destroyed because some accounts for this asset contain freezes. + containsFreezes('ContainsFreezes', 21), + + /// The asset cannot be destroyed because some accounts for this asset contain holds. + containsHolds('ContainsHolds', 22); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.balanceLow; + case 1: + return Error.noAccount; + case 2: + return Error.noPermission; + case 3: + return Error.unknown; + case 4: + return Error.frozen; + case 5: + return Error.inUse; + case 6: + return Error.badWitness; + case 7: + return Error.minBalanceZero; + case 8: + return Error.unavailableConsumer; + case 9: + return Error.badMetadata; + case 10: + return Error.unapproved; + case 11: + return Error.wouldDie; + case 12: + return Error.alreadyExists; + case 13: + return Error.noDeposit; + case 14: + return Error.wouldBurn; + case 15: + return Error.liveAsset; + case 16: + return Error.assetNotLive; + case 17: + return Error.incorrectStatus; + case 18: + return Error.notFrozen; + case 19: + return Error.callbackFailed; + case 20: + return Error.badAssetId; + case 21: + return Error.containsFreezes; + case 22: + return Error.containsHolds; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets/pallet/event.dart 
b/quantus_sdk/lib/generated/planck/types/pallet_assets/pallet/event.dart new file mode 100644 index 00000000..ef0de033 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_assets/pallet/event.dart @@ -0,0 +1,1668 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../sp_core/crypto/account_id32.dart' as _i3; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + Created created({required int assetId, required _i3.AccountId32 creator, required _i3.AccountId32 owner}) { + return Created(assetId: assetId, creator: creator, owner: owner); + } + + Issued issued({required int assetId, required _i3.AccountId32 owner, required BigInt amount}) { + return Issued(assetId: assetId, owner: owner, amount: amount); + } + + Transferred transferred({ + required int assetId, + required _i3.AccountId32 from, + required _i3.AccountId32 to, + required BigInt amount, + }) { + return Transferred(assetId: assetId, from: from, to: to, amount: amount); + } + + Burned burned({required int assetId, required _i3.AccountId32 owner, required BigInt balance}) { + return Burned(assetId: assetId, owner: owner, balance: balance); + } + + TeamChanged teamChanged({ + required int assetId, + required _i3.AccountId32 issuer, + required _i3.AccountId32 admin, + required _i3.AccountId32 freezer, + }) { + return TeamChanged(assetId: assetId, issuer: issuer, admin: admin, 
freezer: freezer); + } + + OwnerChanged ownerChanged({required int assetId, required _i3.AccountId32 owner}) { + return OwnerChanged(assetId: assetId, owner: owner); + } + + Frozen frozen({required int assetId, required _i3.AccountId32 who}) { + return Frozen(assetId: assetId, who: who); + } + + Thawed thawed({required int assetId, required _i3.AccountId32 who}) { + return Thawed(assetId: assetId, who: who); + } + + AssetFrozen assetFrozen({required int assetId}) { + return AssetFrozen(assetId: assetId); + } + + AssetThawed assetThawed({required int assetId}) { + return AssetThawed(assetId: assetId); + } + + AccountsDestroyed accountsDestroyed({ + required int assetId, + required int accountsDestroyed, + required int accountsRemaining, + }) { + return AccountsDestroyed( + assetId: assetId, + accountsDestroyed: accountsDestroyed, + accountsRemaining: accountsRemaining, + ); + } + + ApprovalsDestroyed approvalsDestroyed({ + required int assetId, + required int approvalsDestroyed, + required int approvalsRemaining, + }) { + return ApprovalsDestroyed( + assetId: assetId, + approvalsDestroyed: approvalsDestroyed, + approvalsRemaining: approvalsRemaining, + ); + } + + DestructionStarted destructionStarted({required int assetId}) { + return DestructionStarted(assetId: assetId); + } + + Destroyed destroyed({required int assetId}) { + return Destroyed(assetId: assetId); + } + + ForceCreated forceCreated({required int assetId, required _i3.AccountId32 owner}) { + return ForceCreated(assetId: assetId, owner: owner); + } + + MetadataSet metadataSet({ + required int assetId, + required List name, + required List symbol, + required int decimals, + required bool isFrozen, + }) { + return MetadataSet(assetId: assetId, name: name, symbol: symbol, decimals: decimals, isFrozen: isFrozen); + } + + MetadataCleared metadataCleared({required int assetId}) { + return MetadataCleared(assetId: assetId); + } + + ApprovedTransfer approvedTransfer({ + required int assetId, + required 
_i3.AccountId32 source, + required _i3.AccountId32 delegate, + required BigInt amount, + }) { + return ApprovedTransfer(assetId: assetId, source: source, delegate: delegate, amount: amount); + } + + ApprovalCancelled approvalCancelled({ + required int assetId, + required _i3.AccountId32 owner, + required _i3.AccountId32 delegate, + }) { + return ApprovalCancelled(assetId: assetId, owner: owner, delegate: delegate); + } + + TransferredApproved transferredApproved({ + required int assetId, + required _i3.AccountId32 owner, + required _i3.AccountId32 delegate, + required _i3.AccountId32 destination, + required BigInt amount, + }) { + return TransferredApproved( + assetId: assetId, + owner: owner, + delegate: delegate, + destination: destination, + amount: amount, + ); + } + + AssetStatusChanged assetStatusChanged({required int assetId}) { + return AssetStatusChanged(assetId: assetId); + } + + AssetMinBalanceChanged assetMinBalanceChanged({required int assetId, required BigInt newMinBalance}) { + return AssetMinBalanceChanged(assetId: assetId, newMinBalance: newMinBalance); + } + + Touched touched({required int assetId, required _i3.AccountId32 who, required _i3.AccountId32 depositor}) { + return Touched(assetId: assetId, who: who, depositor: depositor); + } + + Blocked blocked({required int assetId, required _i3.AccountId32 who}) { + return Blocked(assetId: assetId, who: who); + } + + Deposited deposited({required int assetId, required _i3.AccountId32 who, required BigInt amount}) { + return Deposited(assetId: assetId, who: who, amount: amount); + } + + Withdrawn withdrawn({required int assetId, required _i3.AccountId32 who, required BigInt amount}) { + return Withdrawn(assetId: assetId, who: who, amount: amount); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Created._decode(input); + case 1: + return 
Issued._decode(input); + case 2: + return Transferred._decode(input); + case 3: + return Burned._decode(input); + case 4: + return TeamChanged._decode(input); + case 5: + return OwnerChanged._decode(input); + case 6: + return Frozen._decode(input); + case 7: + return Thawed._decode(input); + case 8: + return AssetFrozen._decode(input); + case 9: + return AssetThawed._decode(input); + case 10: + return AccountsDestroyed._decode(input); + case 11: + return ApprovalsDestroyed._decode(input); + case 12: + return DestructionStarted._decode(input); + case 13: + return Destroyed._decode(input); + case 14: + return ForceCreated._decode(input); + case 15: + return MetadataSet._decode(input); + case 16: + return MetadataCleared._decode(input); + case 17: + return ApprovedTransfer._decode(input); + case 18: + return ApprovalCancelled._decode(input); + case 19: + return TransferredApproved._decode(input); + case 20: + return AssetStatusChanged._decode(input); + case 21: + return AssetMinBalanceChanged._decode(input); + case 22: + return Touched._decode(input); + case 23: + return Blocked._decode(input); + case 24: + return Deposited._decode(input); + case 25: + return Withdrawn._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case Created: + (value as Created).encodeTo(output); + break; + case Issued: + (value as Issued).encodeTo(output); + break; + case Transferred: + (value as Transferred).encodeTo(output); + break; + case Burned: + (value as Burned).encodeTo(output); + break; + case TeamChanged: + (value as TeamChanged).encodeTo(output); + break; + case OwnerChanged: + (value as OwnerChanged).encodeTo(output); + break; + case Frozen: + (value as Frozen).encodeTo(output); + break; + case Thawed: + (value as Thawed).encodeTo(output); + break; + case AssetFrozen: + (value as AssetFrozen).encodeTo(output); + break; + case AssetThawed: + 
(value as AssetThawed).encodeTo(output); + break; + case AccountsDestroyed: + (value as AccountsDestroyed).encodeTo(output); + break; + case ApprovalsDestroyed: + (value as ApprovalsDestroyed).encodeTo(output); + break; + case DestructionStarted: + (value as DestructionStarted).encodeTo(output); + break; + case Destroyed: + (value as Destroyed).encodeTo(output); + break; + case ForceCreated: + (value as ForceCreated).encodeTo(output); + break; + case MetadataSet: + (value as MetadataSet).encodeTo(output); + break; + case MetadataCleared: + (value as MetadataCleared).encodeTo(output); + break; + case ApprovedTransfer: + (value as ApprovedTransfer).encodeTo(output); + break; + case ApprovalCancelled: + (value as ApprovalCancelled).encodeTo(output); + break; + case TransferredApproved: + (value as TransferredApproved).encodeTo(output); + break; + case AssetStatusChanged: + (value as AssetStatusChanged).encodeTo(output); + break; + case AssetMinBalanceChanged: + (value as AssetMinBalanceChanged).encodeTo(output); + break; + case Touched: + (value as Touched).encodeTo(output); + break; + case Blocked: + (value as Blocked).encodeTo(output); + break; + case Deposited: + (value as Deposited).encodeTo(output); + break; + case Withdrawn: + (value as Withdrawn).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case Created: + return (value as Created)._sizeHint(); + case Issued: + return (value as Issued)._sizeHint(); + case Transferred: + return (value as Transferred)._sizeHint(); + case Burned: + return (value as Burned)._sizeHint(); + case TeamChanged: + return (value as TeamChanged)._sizeHint(); + case OwnerChanged: + return (value as OwnerChanged)._sizeHint(); + case Frozen: + return (value as Frozen)._sizeHint(); + case Thawed: + return (value as Thawed)._sizeHint(); + case AssetFrozen: + return (value as 
AssetFrozen)._sizeHint(); + case AssetThawed: + return (value as AssetThawed)._sizeHint(); + case AccountsDestroyed: + return (value as AccountsDestroyed)._sizeHint(); + case ApprovalsDestroyed: + return (value as ApprovalsDestroyed)._sizeHint(); + case DestructionStarted: + return (value as DestructionStarted)._sizeHint(); + case Destroyed: + return (value as Destroyed)._sizeHint(); + case ForceCreated: + return (value as ForceCreated)._sizeHint(); + case MetadataSet: + return (value as MetadataSet)._sizeHint(); + case MetadataCleared: + return (value as MetadataCleared)._sizeHint(); + case ApprovedTransfer: + return (value as ApprovedTransfer)._sizeHint(); + case ApprovalCancelled: + return (value as ApprovalCancelled)._sizeHint(); + case TransferredApproved: + return (value as TransferredApproved)._sizeHint(); + case AssetStatusChanged: + return (value as AssetStatusChanged)._sizeHint(); + case AssetMinBalanceChanged: + return (value as AssetMinBalanceChanged)._sizeHint(); + case Touched: + return (value as Touched)._sizeHint(); + case Blocked: + return (value as Blocked)._sizeHint(); + case Deposited: + return (value as Deposited)._sizeHint(); + case Withdrawn: + return (value as Withdrawn)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Some asset class was created. 
+class Created extends Event { + const Created({required this.assetId, required this.creator, required this.owner}); + + factory Created._decode(_i1.Input input) { + return Created( + assetId: _i1.U32Codec.codec.decode(input), + creator: const _i1.U8ArrayCodec(32).decode(input), + owner: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 creator; + + /// T::AccountId + final _i3.AccountId32 owner; + + @override + Map> toJson() => { + 'Created': {'assetId': assetId, 'creator': creator.toList(), 'owner': owner.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(creator); + size = size + const _i3.AccountId32Codec().sizeHint(owner); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(creator, output); + const _i1.U8ArrayCodec(32).encodeTo(owner, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Created && + other.assetId == assetId && + _i4.listsEqual(other.creator, creator) && + _i4.listsEqual(other.owner, owner); + + @override + int get hashCode => Object.hash(assetId, creator, owner); +} + +/// Some assets were issued. 
+class Issued extends Event { + const Issued({required this.assetId, required this.owner, required this.amount}); + + factory Issued._decode(_i1.Input input) { + return Issued( + assetId: _i1.U32Codec.codec.decode(input), + owner: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 owner; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Issued': {'assetId': assetId, 'owner': owner.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(owner); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(owner, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Issued && other.assetId == assetId && _i4.listsEqual(other.owner, owner) && other.amount == amount; + + @override + int get hashCode => Object.hash(assetId, owner, amount); +} + +/// Some assets were transferred. 
+class Transferred extends Event { + const Transferred({required this.assetId, required this.from, required this.to, required this.amount}); + + factory Transferred._decode(_i1.Input input) { + return Transferred( + assetId: _i1.U32Codec.codec.decode(input), + from: const _i1.U8ArrayCodec(32).decode(input), + to: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 from; + + /// T::AccountId + final _i3.AccountId32 to; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Transferred': {'assetId': assetId, 'from': from.toList(), 'to': to.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(from); + size = size + const _i3.AccountId32Codec().sizeHint(to); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(from, output); + const _i1.U8ArrayCodec(32).encodeTo(to, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Transferred && + other.assetId == assetId && + _i4.listsEqual(other.from, from) && + _i4.listsEqual(other.to, to) && + other.amount == amount; + + @override + int get hashCode => Object.hash(assetId, from, to, amount); +} + +/// Some assets were destroyed. 
+class Burned extends Event { + const Burned({required this.assetId, required this.owner, required this.balance}); + + factory Burned._decode(_i1.Input input) { + return Burned( + assetId: _i1.U32Codec.codec.decode(input), + owner: const _i1.U8ArrayCodec(32).decode(input), + balance: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 owner; + + /// T::Balance + final BigInt balance; + + @override + Map> toJson() => { + 'Burned': {'assetId': assetId, 'owner': owner.toList(), 'balance': balance}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(owner); + size = size + _i1.U128Codec.codec.sizeHint(balance); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(owner, output); + _i1.U128Codec.codec.encodeTo(balance, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Burned && other.assetId == assetId && _i4.listsEqual(other.owner, owner) && other.balance == balance; + + @override + int get hashCode => Object.hash(assetId, owner, balance); +} + +/// The management team changed. 
+class TeamChanged extends Event { + const TeamChanged({required this.assetId, required this.issuer, required this.admin, required this.freezer}); + + factory TeamChanged._decode(_i1.Input input) { + return TeamChanged( + assetId: _i1.U32Codec.codec.decode(input), + issuer: const _i1.U8ArrayCodec(32).decode(input), + admin: const _i1.U8ArrayCodec(32).decode(input), + freezer: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 issuer; + + /// T::AccountId + final _i3.AccountId32 admin; + + /// T::AccountId + final _i3.AccountId32 freezer; + + @override + Map> toJson() => { + 'TeamChanged': { + 'assetId': assetId, + 'issuer': issuer.toList(), + 'admin': admin.toList(), + 'freezer': freezer.toList(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(issuer); + size = size + const _i3.AccountId32Codec().sizeHint(admin); + size = size + const _i3.AccountId32Codec().sizeHint(freezer); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(issuer, output); + const _i1.U8ArrayCodec(32).encodeTo(admin, output); + const _i1.U8ArrayCodec(32).encodeTo(freezer, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is TeamChanged && + other.assetId == assetId && + _i4.listsEqual(other.issuer, issuer) && + _i4.listsEqual(other.admin, admin) && + _i4.listsEqual(other.freezer, freezer); + + @override + int get hashCode => Object.hash(assetId, issuer, admin, freezer); +} + +/// The owner changed. 
+class OwnerChanged extends Event { + const OwnerChanged({required this.assetId, required this.owner}); + + factory OwnerChanged._decode(_i1.Input input) { + return OwnerChanged(assetId: _i1.U32Codec.codec.decode(input), owner: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 owner; + + @override + Map> toJson() => { + 'OwnerChanged': {'assetId': assetId, 'owner': owner.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(owner); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(owner, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is OwnerChanged && other.assetId == assetId && _i4.listsEqual(other.owner, owner); + + @override + int get hashCode => Object.hash(assetId, owner); +} + +/// Some account `who` was frozen. 
+class Frozen extends Event { + const Frozen({required this.assetId, required this.who}); + + factory Frozen._decode(_i1.Input input) { + return Frozen(assetId: _i1.U32Codec.codec.decode(input), who: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 who; + + @override + Map> toJson() => { + 'Frozen': {'assetId': assetId, 'who': who.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Frozen && other.assetId == assetId && _i4.listsEqual(other.who, who); + + @override + int get hashCode => Object.hash(assetId, who); +} + +/// Some account `who` was thawed. 
+class Thawed extends Event { + const Thawed({required this.assetId, required this.who}); + + factory Thawed._decode(_i1.Input input) { + return Thawed(assetId: _i1.U32Codec.codec.decode(input), who: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 who; + + @override + Map> toJson() => { + 'Thawed': {'assetId': assetId, 'who': who.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Thawed && other.assetId == assetId && _i4.listsEqual(other.who, who); + + @override + int get hashCode => Object.hash(assetId, who); +} + +/// Some asset `asset_id` was frozen. +class AssetFrozen extends Event { + const AssetFrozen({required this.assetId}); + + factory AssetFrozen._decode(_i1.Input input) { + return AssetFrozen(assetId: _i1.U32Codec.codec.decode(input)); + } + + /// T::AssetId + final int assetId; + + @override + Map> toJson() => { + 'AssetFrozen': {'assetId': assetId}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is AssetFrozen && other.assetId == assetId; + + @override + int get hashCode => assetId.hashCode; +} + +/// Some asset `asset_id` was thawed. 
+class AssetThawed extends Event { + const AssetThawed({required this.assetId}); + + factory AssetThawed._decode(_i1.Input input) { + return AssetThawed(assetId: _i1.U32Codec.codec.decode(input)); + } + + /// T::AssetId + final int assetId; + + @override + Map> toJson() => { + 'AssetThawed': {'assetId': assetId}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is AssetThawed && other.assetId == assetId; + + @override + int get hashCode => assetId.hashCode; +} + +/// Accounts were destroyed for given asset. +class AccountsDestroyed extends Event { + const AccountsDestroyed({required this.assetId, required this.accountsDestroyed, required this.accountsRemaining}); + + factory AccountsDestroyed._decode(_i1.Input input) { + return AccountsDestroyed( + assetId: _i1.U32Codec.codec.decode(input), + accountsDestroyed: _i1.U32Codec.codec.decode(input), + accountsRemaining: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// u32 + final int accountsDestroyed; + + /// u32 + final int accountsRemaining; + + @override + Map> toJson() => { + 'AccountsDestroyed': { + 'assetId': assetId, + 'accountsDestroyed': accountsDestroyed, + 'accountsRemaining': accountsRemaining, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + _i1.U32Codec.codec.sizeHint(accountsDestroyed); + size = size + _i1.U32Codec.codec.sizeHint(accountsRemaining); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(10, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + _i1.U32Codec.codec.encodeTo(accountsDestroyed, output); + _i1.U32Codec.codec.encodeTo(accountsRemaining, output); + } + + 
@override + bool operator ==(Object other) => + identical(this, other) || + other is AccountsDestroyed && + other.assetId == assetId && + other.accountsDestroyed == accountsDestroyed && + other.accountsRemaining == accountsRemaining; + + @override + int get hashCode => Object.hash(assetId, accountsDestroyed, accountsRemaining); +} + +/// Approvals were destroyed for given asset. +class ApprovalsDestroyed extends Event { + const ApprovalsDestroyed({required this.assetId, required this.approvalsDestroyed, required this.approvalsRemaining}); + + factory ApprovalsDestroyed._decode(_i1.Input input) { + return ApprovalsDestroyed( + assetId: _i1.U32Codec.codec.decode(input), + approvalsDestroyed: _i1.U32Codec.codec.decode(input), + approvalsRemaining: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// u32 + final int approvalsDestroyed; + + /// u32 + final int approvalsRemaining; + + @override + Map> toJson() => { + 'ApprovalsDestroyed': { + 'assetId': assetId, + 'approvalsDestroyed': approvalsDestroyed, + 'approvalsRemaining': approvalsRemaining, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + _i1.U32Codec.codec.sizeHint(approvalsDestroyed); + size = size + _i1.U32Codec.codec.sizeHint(approvalsRemaining); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(11, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + _i1.U32Codec.codec.encodeTo(approvalsDestroyed, output); + _i1.U32Codec.codec.encodeTo(approvalsRemaining, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ApprovalsDestroyed && + other.assetId == assetId && + other.approvalsDestroyed == approvalsDestroyed && + other.approvalsRemaining == approvalsRemaining; + + @override + int get hashCode => Object.hash(assetId, approvalsDestroyed, approvalsRemaining); +} + +/// An asset class is in the process of being 
destroyed. +class DestructionStarted extends Event { + const DestructionStarted({required this.assetId}); + + factory DestructionStarted._decode(_i1.Input input) { + return DestructionStarted(assetId: _i1.U32Codec.codec.decode(input)); + } + + /// T::AssetId + final int assetId; + + @override + Map> toJson() => { + 'DestructionStarted': {'assetId': assetId}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(12, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is DestructionStarted && other.assetId == assetId; + + @override + int get hashCode => assetId.hashCode; +} + +/// An asset class was destroyed. +class Destroyed extends Event { + const Destroyed({required this.assetId}); + + factory Destroyed._decode(_i1.Input input) { + return Destroyed(assetId: _i1.U32Codec.codec.decode(input)); + } + + /// T::AssetId + final int assetId; + + @override + Map> toJson() => { + 'Destroyed': {'assetId': assetId}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(13, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Destroyed && other.assetId == assetId; + + @override + int get hashCode => assetId.hashCode; +} + +/// Some asset class was force-created. 
+class ForceCreated extends Event { + const ForceCreated({required this.assetId, required this.owner}); + + factory ForceCreated._decode(_i1.Input input) { + return ForceCreated(assetId: _i1.U32Codec.codec.decode(input), owner: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 owner; + + @override + Map> toJson() => { + 'ForceCreated': {'assetId': assetId, 'owner': owner.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(owner); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(14, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(owner, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is ForceCreated && other.assetId == assetId && _i4.listsEqual(other.owner, owner); + + @override + int get hashCode => Object.hash(assetId, owner); +} + +/// New metadata has been set for an asset. 
+class MetadataSet extends Event { + const MetadataSet({ + required this.assetId, + required this.name, + required this.symbol, + required this.decimals, + required this.isFrozen, + }); + + factory MetadataSet._decode(_i1.Input input) { + return MetadataSet( + assetId: _i1.U32Codec.codec.decode(input), + name: _i1.U8SequenceCodec.codec.decode(input), + symbol: _i1.U8SequenceCodec.codec.decode(input), + decimals: _i1.U8Codec.codec.decode(input), + isFrozen: _i1.BoolCodec.codec.decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// Vec + final List name; + + /// Vec + final List symbol; + + /// u8 + final int decimals; + + /// bool + final bool isFrozen; + + @override + Map> toJson() => { + 'MetadataSet': {'assetId': assetId, 'name': name, 'symbol': symbol, 'decimals': decimals, 'isFrozen': isFrozen}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + _i1.U8SequenceCodec.codec.sizeHint(name); + size = size + _i1.U8SequenceCodec.codec.sizeHint(symbol); + size = size + _i1.U8Codec.codec.sizeHint(decimals); + size = size + _i1.BoolCodec.codec.sizeHint(isFrozen); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(15, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + _i1.U8SequenceCodec.codec.encodeTo(name, output); + _i1.U8SequenceCodec.codec.encodeTo(symbol, output); + _i1.U8Codec.codec.encodeTo(decimals, output); + _i1.BoolCodec.codec.encodeTo(isFrozen, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is MetadataSet && + other.assetId == assetId && + _i4.listsEqual(other.name, name) && + _i4.listsEqual(other.symbol, symbol) && + other.decimals == decimals && + other.isFrozen == isFrozen; + + @override + int get hashCode => Object.hash(assetId, name, symbol, decimals, isFrozen); +} + +/// Metadata has been cleared for an asset. 
+class MetadataCleared extends Event { + const MetadataCleared({required this.assetId}); + + factory MetadataCleared._decode(_i1.Input input) { + return MetadataCleared(assetId: _i1.U32Codec.codec.decode(input)); + } + + /// T::AssetId + final int assetId; + + @override + Map> toJson() => { + 'MetadataCleared': {'assetId': assetId}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(16, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is MetadataCleared && other.assetId == assetId; + + @override + int get hashCode => assetId.hashCode; +} + +/// (Additional) funds have been approved for transfer to a destination account. +class ApprovedTransfer extends Event { + const ApprovedTransfer({required this.assetId, required this.source, required this.delegate, required this.amount}); + + factory ApprovedTransfer._decode(_i1.Input input) { + return ApprovedTransfer( + assetId: _i1.U32Codec.codec.decode(input), + source: const _i1.U8ArrayCodec(32).decode(input), + delegate: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 source; + + /// T::AccountId + final _i3.AccountId32 delegate; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'ApprovedTransfer': { + 'assetId': assetId, + 'source': source.toList(), + 'delegate': delegate.toList(), + 'amount': amount, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(source); + size = size + const _i3.AccountId32Codec().sizeHint(delegate); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(17, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(source, output); + const _i1.U8ArrayCodec(32).encodeTo(delegate, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ApprovedTransfer && + other.assetId == assetId && + _i4.listsEqual(other.source, source) && + _i4.listsEqual(other.delegate, delegate) && + other.amount == amount; + + @override + int get hashCode => Object.hash(assetId, source, delegate, amount); +} + +/// An approval for account `delegate` was cancelled by `owner`. +class ApprovalCancelled extends Event { + const ApprovalCancelled({required this.assetId, required this.owner, required this.delegate}); + + factory ApprovalCancelled._decode(_i1.Input input) { + return ApprovalCancelled( + assetId: _i1.U32Codec.codec.decode(input), + owner: const _i1.U8ArrayCodec(32).decode(input), + delegate: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 owner; + + /// T::AccountId + final _i3.AccountId32 delegate; + + @override + Map> toJson() => { + 'ApprovalCancelled': {'assetId': assetId, 'owner': owner.toList(), 'delegate': delegate.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(owner); + size = size + const _i3.AccountId32Codec().sizeHint(delegate); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(18, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(owner, output); + const _i1.U8ArrayCodec(32).encodeTo(delegate, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ApprovalCancelled && + other.assetId == assetId && + _i4.listsEqual(other.owner, owner) && + 
_i4.listsEqual(other.delegate, delegate); + + @override + int get hashCode => Object.hash(assetId, owner, delegate); +} + +/// An `amount` was transferred in its entirety from `owner` to `destination` by +/// the approved `delegate`. +class TransferredApproved extends Event { + const TransferredApproved({ + required this.assetId, + required this.owner, + required this.delegate, + required this.destination, + required this.amount, + }); + + factory TransferredApproved._decode(_i1.Input input) { + return TransferredApproved( + assetId: _i1.U32Codec.codec.decode(input), + owner: const _i1.U8ArrayCodec(32).decode(input), + delegate: const _i1.U8ArrayCodec(32).decode(input), + destination: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 owner; + + /// T::AccountId + final _i3.AccountId32 delegate; + + /// T::AccountId + final _i3.AccountId32 destination; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'TransferredApproved': { + 'assetId': assetId, + 'owner': owner.toList(), + 'delegate': delegate.toList(), + 'destination': destination.toList(), + 'amount': amount, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(owner); + size = size + const _i3.AccountId32Codec().sizeHint(delegate); + size = size + const _i3.AccountId32Codec().sizeHint(destination); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(19, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(owner, output); + const _i1.U8ArrayCodec(32).encodeTo(delegate, output); + const _i1.U8ArrayCodec(32).encodeTo(destination, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator 
==(Object other) => + identical(this, other) || + other is TransferredApproved && + other.assetId == assetId && + _i4.listsEqual(other.owner, owner) && + _i4.listsEqual(other.delegate, delegate) && + _i4.listsEqual(other.destination, destination) && + other.amount == amount; + + @override + int get hashCode => Object.hash(assetId, owner, delegate, destination, amount); +} + +/// An asset has had its attributes changed by the `Force` origin. +class AssetStatusChanged extends Event { + const AssetStatusChanged({required this.assetId}); + + factory AssetStatusChanged._decode(_i1.Input input) { + return AssetStatusChanged(assetId: _i1.U32Codec.codec.decode(input)); + } + + /// T::AssetId + final int assetId; + + @override + Map> toJson() => { + 'AssetStatusChanged': {'assetId': assetId}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(20, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is AssetStatusChanged && other.assetId == assetId; + + @override + int get hashCode => assetId.hashCode; +} + +/// The min_balance of an asset has been updated by the asset owner. 
+class AssetMinBalanceChanged extends Event { + const AssetMinBalanceChanged({required this.assetId, required this.newMinBalance}); + + factory AssetMinBalanceChanged._decode(_i1.Input input) { + return AssetMinBalanceChanged( + assetId: _i1.U32Codec.codec.decode(input), + newMinBalance: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::Balance + final BigInt newMinBalance; + + @override + Map> toJson() => { + 'AssetMinBalanceChanged': {'assetId': assetId, 'newMinBalance': newMinBalance}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + _i1.U128Codec.codec.sizeHint(newMinBalance); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(21, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + _i1.U128Codec.codec.encodeTo(newMinBalance, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is AssetMinBalanceChanged && other.assetId == assetId && other.newMinBalance == newMinBalance; + + @override + int get hashCode => Object.hash(assetId, newMinBalance); +} + +/// Some account `who` was created with a deposit from `depositor`. 
+class Touched extends Event { + const Touched({required this.assetId, required this.who, required this.depositor}); + + factory Touched._decode(_i1.Input input) { + return Touched( + assetId: _i1.U32Codec.codec.decode(input), + who: const _i1.U8ArrayCodec(32).decode(input), + depositor: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::AccountId + final _i3.AccountId32 depositor; + + @override + Map> toJson() => { + 'Touched': {'assetId': assetId, 'who': who.toList(), 'depositor': depositor.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + const _i3.AccountId32Codec().sizeHint(depositor); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(22, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + const _i1.U8ArrayCodec(32).encodeTo(depositor, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Touched && + other.assetId == assetId && + _i4.listsEqual(other.who, who) && + _i4.listsEqual(other.depositor, depositor); + + @override + int get hashCode => Object.hash(assetId, who, depositor); +} + +/// Some account `who` was blocked. 
+class Blocked extends Event { + const Blocked({required this.assetId, required this.who}); + + factory Blocked._decode(_i1.Input input) { + return Blocked(assetId: _i1.U32Codec.codec.decode(input), who: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 who; + + @override + Map> toJson() => { + 'Blocked': {'assetId': assetId, 'who': who.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(23, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Blocked && other.assetId == assetId && _i4.listsEqual(other.who, who); + + @override + int get hashCode => Object.hash(assetId, who); +} + +/// Some assets were deposited (e.g. for transaction fees). 
+class Deposited extends Event { + const Deposited({required this.assetId, required this.who, required this.amount}); + + factory Deposited._decode(_i1.Input input) { + return Deposited( + assetId: _i1.U32Codec.codec.decode(input), + who: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Deposited': {'assetId': assetId, 'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(24, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Deposited && other.assetId == assetId && _i4.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(assetId, who, amount); +} + +/// Some assets were withdrawn from the account (e.g. for transaction fees). 
+class Withdrawn extends Event { + const Withdrawn({required this.assetId, required this.who, required this.amount}); + + factory Withdrawn._decode(_i1.Input input) { + return Withdrawn( + assetId: _i1.U32Codec.codec.decode(input), + who: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AssetId + final int assetId; + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Withdrawn': {'assetId': assetId, 'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(25, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Withdrawn && other.assetId == assetId && _i4.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(assetId, who, amount); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets/types/account_status.dart b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/account_status.dart new file mode 100644 index 00000000..6ad2a4a4 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/account_status.dart @@ -0,0 +1,52 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum AccountStatus { + liquid('Liquid', 0), + frozen('Frozen', 1), + blocked('Blocked', 2); + + const AccountStatus(this.variantName, this.codecIndex); + + factory AccountStatus.decode(_i1.Input input) { 
+ return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $AccountStatusCodec codec = $AccountStatusCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $AccountStatusCodec with _i1.Codec { + const $AccountStatusCodec(); + + @override + AccountStatus decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return AccountStatus.liquid; + case 1: + return AccountStatus.frozen; + case 2: + return AccountStatus.blocked; + default: + throw Exception('AccountStatus: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(AccountStatus value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets/types/approval.dart b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/approval.dart new file mode 100644 index 00000000..ef7cbb93 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/approval.dart @@ -0,0 +1,56 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +class Approval { + const Approval({required this.amount, required this.deposit}); + + factory Approval.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Balance + final BigInt amount; + + /// DepositBalance + final BigInt deposit; + + static const $ApprovalCodec codec = $ApprovalCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'amount': amount, 'deposit': deposit}; + + @override + bool operator ==(Object other) => + identical(this, other) || other is Approval && other.amount == amount && other.deposit == deposit; + + @override + int get hashCode => Object.hash(amount, deposit); +} + +class $ApprovalCodec with _i1.Codec { + const $ApprovalCodec(); + 
+ @override + void encodeTo(Approval obj, _i1.Output output) { + _i1.U128Codec.codec.encodeTo(obj.amount, output); + _i1.U128Codec.codec.encodeTo(obj.deposit, output); + } + + @override + Approval decode(_i1.Input input) { + return Approval(amount: _i1.U128Codec.codec.decode(input), deposit: _i1.U128Codec.codec.decode(input)); + } + + @override + int sizeHint(Approval obj) { + int size = 0; + size = size + _i1.U128Codec.codec.sizeHint(obj.amount); + size = size + _i1.U128Codec.codec.sizeHint(obj.deposit); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_account.dart b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_account.dart new file mode 100644 index 00000000..7002cc2c --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_account.dart @@ -0,0 +1,84 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i4; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import 'account_status.dart' as _i2; +import 'existence_reason.dart' as _i3; + +class AssetAccount { + const AssetAccount({required this.balance, required this.status, required this.reason, required this.extra}); + + factory AssetAccount.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Balance + final BigInt balance; + + /// AccountStatus + final _i2.AccountStatus status; + + /// ExistenceReason + final _i3.ExistenceReason reason; + + /// Extra + final dynamic extra; + + static const $AssetAccountCodec codec = $AssetAccountCodec(); + + _i4.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'balance': balance, + 'status': status.toJson(), + 'reason': reason.toJson(), + 'extra': null, + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is AssetAccount && + other.balance == balance && + other.status == status && + other.reason == reason && + other.extra == extra; + + @override + int 
get hashCode => Object.hash(balance, status, reason, extra); +} + +class $AssetAccountCodec with _i1.Codec { + const $AssetAccountCodec(); + + @override + void encodeTo(AssetAccount obj, _i1.Output output) { + _i1.U128Codec.codec.encodeTo(obj.balance, output); + _i2.AccountStatus.codec.encodeTo(obj.status, output); + _i3.ExistenceReason.codec.encodeTo(obj.reason, output); + _i1.NullCodec.codec.encodeTo(obj.extra, output); + } + + @override + AssetAccount decode(_i1.Input input) { + return AssetAccount( + balance: _i1.U128Codec.codec.decode(input), + status: _i2.AccountStatus.codec.decode(input), + reason: _i3.ExistenceReason.codec.decode(input), + extra: _i1.NullCodec.codec.decode(input), + ); + } + + @override + int sizeHint(AssetAccount obj) { + int size = 0; + size = size + _i1.U128Codec.codec.sizeHint(obj.balance); + size = size + _i2.AccountStatus.codec.sizeHint(obj.status); + size = size + _i3.ExistenceReason.codec.sizeHint(obj.reason); + size = size + _i1.NullCodec.codec.sizeHint(obj.extra); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_details.dart b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_details.dart new file mode 100644 index 00000000..9a721ed9 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_details.dart @@ -0,0 +1,175 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i4; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import '../../sp_core/crypto/account_id32.dart' as _i2; +import 'asset_status.dart' as _i3; + +class AssetDetails { + const AssetDetails({ + required this.owner, + required this.issuer, + required this.admin, + required this.freezer, + required this.supply, + required this.deposit, + required this.minBalance, + required this.isSufficient, + required this.accounts, + required this.sufficients, + required this.approvals, + 
required this.status, + }); + + factory AssetDetails.decode(_i1.Input input) { + return codec.decode(input); + } + + /// AccountId + final _i2.AccountId32 owner; + + /// AccountId + final _i2.AccountId32 issuer; + + /// AccountId + final _i2.AccountId32 admin; + + /// AccountId + final _i2.AccountId32 freezer; + + /// Balance + final BigInt supply; + + /// DepositBalance + final BigInt deposit; + + /// Balance + final BigInt minBalance; + + /// bool + final bool isSufficient; + + /// u32 + final int accounts; + + /// u32 + final int sufficients; + + /// u32 + final int approvals; + + /// AssetStatus + final _i3.AssetStatus status; + + static const $AssetDetailsCodec codec = $AssetDetailsCodec(); + + _i4.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'owner': owner.toList(), + 'issuer': issuer.toList(), + 'admin': admin.toList(), + 'freezer': freezer.toList(), + 'supply': supply, + 'deposit': deposit, + 'minBalance': minBalance, + 'isSufficient': isSufficient, + 'accounts': accounts, + 'sufficients': sufficients, + 'approvals': approvals, + 'status': status.toJson(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is AssetDetails && + _i5.listsEqual(other.owner, owner) && + _i5.listsEqual(other.issuer, issuer) && + _i5.listsEqual(other.admin, admin) && + _i5.listsEqual(other.freezer, freezer) && + other.supply == supply && + other.deposit == deposit && + other.minBalance == minBalance && + other.isSufficient == isSufficient && + other.accounts == accounts && + other.sufficients == sufficients && + other.approvals == approvals && + other.status == status; + + @override + int get hashCode => Object.hash( + owner, + issuer, + admin, + freezer, + supply, + deposit, + minBalance, + isSufficient, + accounts, + sufficients, + approvals, + status, + ); +} + +class $AssetDetailsCodec with _i1.Codec { + const $AssetDetailsCodec(); + + @override + void encodeTo(AssetDetails obj, _i1.Output output) { + 
const _i1.U8ArrayCodec(32).encodeTo(obj.owner, output); + const _i1.U8ArrayCodec(32).encodeTo(obj.issuer, output); + const _i1.U8ArrayCodec(32).encodeTo(obj.admin, output); + const _i1.U8ArrayCodec(32).encodeTo(obj.freezer, output); + _i1.U128Codec.codec.encodeTo(obj.supply, output); + _i1.U128Codec.codec.encodeTo(obj.deposit, output); + _i1.U128Codec.codec.encodeTo(obj.minBalance, output); + _i1.BoolCodec.codec.encodeTo(obj.isSufficient, output); + _i1.U32Codec.codec.encodeTo(obj.accounts, output); + _i1.U32Codec.codec.encodeTo(obj.sufficients, output); + _i1.U32Codec.codec.encodeTo(obj.approvals, output); + _i3.AssetStatus.codec.encodeTo(obj.status, output); + } + + @override + AssetDetails decode(_i1.Input input) { + return AssetDetails( + owner: const _i1.U8ArrayCodec(32).decode(input), + issuer: const _i1.U8ArrayCodec(32).decode(input), + admin: const _i1.U8ArrayCodec(32).decode(input), + freezer: const _i1.U8ArrayCodec(32).decode(input), + supply: _i1.U128Codec.codec.decode(input), + deposit: _i1.U128Codec.codec.decode(input), + minBalance: _i1.U128Codec.codec.decode(input), + isSufficient: _i1.BoolCodec.codec.decode(input), + accounts: _i1.U32Codec.codec.decode(input), + sufficients: _i1.U32Codec.codec.decode(input), + approvals: _i1.U32Codec.codec.decode(input), + status: _i3.AssetStatus.codec.decode(input), + ); + } + + @override + int sizeHint(AssetDetails obj) { + int size = 0; + size = size + const _i2.AccountId32Codec().sizeHint(obj.owner); + size = size + const _i2.AccountId32Codec().sizeHint(obj.issuer); + size = size + const _i2.AccountId32Codec().sizeHint(obj.admin); + size = size + const _i2.AccountId32Codec().sizeHint(obj.freezer); + size = size + _i1.U128Codec.codec.sizeHint(obj.supply); + size = size + _i1.U128Codec.codec.sizeHint(obj.deposit); + size = size + _i1.U128Codec.codec.sizeHint(obj.minBalance); + size = size + _i1.BoolCodec.codec.sizeHint(obj.isSufficient); + size = size + _i1.U32Codec.codec.sizeHint(obj.accounts); + size = size + 
_i1.U32Codec.codec.sizeHint(obj.sufficients); + size = size + _i1.U32Codec.codec.sizeHint(obj.approvals); + size = size + _i3.AssetStatus.codec.sizeHint(obj.status); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_metadata.dart b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_metadata.dart new file mode 100644 index 00000000..45f1f10f --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_metadata.dart @@ -0,0 +1,96 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i3; + +class AssetMetadata { + const AssetMetadata({ + required this.deposit, + required this.name, + required this.symbol, + required this.decimals, + required this.isFrozen, + }); + + factory AssetMetadata.decode(_i1.Input input) { + return codec.decode(input); + } + + /// DepositBalance + final BigInt deposit; + + /// BoundedString + final List name; + + /// BoundedString + final List symbol; + + /// u8 + final int decimals; + + /// bool + final bool isFrozen; + + static const $AssetMetadataCodec codec = $AssetMetadataCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'deposit': deposit, + 'name': name, + 'symbol': symbol, + 'decimals': decimals, + 'isFrozen': isFrozen, + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is AssetMetadata && + other.deposit == deposit && + _i3.listsEqual(other.name, name) && + _i3.listsEqual(other.symbol, symbol) && + other.decimals == decimals && + other.isFrozen == isFrozen; + + @override + int get hashCode => Object.hash(deposit, name, symbol, decimals, isFrozen); +} + +class $AssetMetadataCodec with _i1.Codec { + const $AssetMetadataCodec(); + + @override + void encodeTo(AssetMetadata obj, _i1.Output output) { + 
_i1.U128Codec.codec.encodeTo(obj.deposit, output); + _i1.U8SequenceCodec.codec.encodeTo(obj.name, output); + _i1.U8SequenceCodec.codec.encodeTo(obj.symbol, output); + _i1.U8Codec.codec.encodeTo(obj.decimals, output); + _i1.BoolCodec.codec.encodeTo(obj.isFrozen, output); + } + + @override + AssetMetadata decode(_i1.Input input) { + return AssetMetadata( + deposit: _i1.U128Codec.codec.decode(input), + name: _i1.U8SequenceCodec.codec.decode(input), + symbol: _i1.U8SequenceCodec.codec.decode(input), + decimals: _i1.U8Codec.codec.decode(input), + isFrozen: _i1.BoolCodec.codec.decode(input), + ); + } + + @override + int sizeHint(AssetMetadata obj) { + int size = 0; + size = size + _i1.U128Codec.codec.sizeHint(obj.deposit); + size = size + _i1.U8SequenceCodec.codec.sizeHint(obj.name); + size = size + _i1.U8SequenceCodec.codec.sizeHint(obj.symbol); + size = size + _i1.U8Codec.codec.sizeHint(obj.decimals); + size = size + _i1.BoolCodec.codec.sizeHint(obj.isFrozen); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_status.dart b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_status.dart new file mode 100644 index 00000000..38f8219f --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/asset_status.dart @@ -0,0 +1,52 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum AssetStatus { + live('Live', 0), + frozen('Frozen', 1), + destroying('Destroying', 2); + + const AssetStatus(this.variantName, this.codecIndex); + + factory AssetStatus.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $AssetStatusCodec codec = $AssetStatusCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $AssetStatusCodec with _i1.Codec { + const 
$AssetStatusCodec(); + + @override + AssetStatus decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return AssetStatus.live; + case 1: + return AssetStatus.frozen; + case 2: + return AssetStatus.destroying; + default: + throw Exception('AssetStatus: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(AssetStatus value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets/types/existence_reason.dart b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/existence_reason.dart new file mode 100644 index 00000000..d0714f82 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_assets/types/existence_reason.dart @@ -0,0 +1,240 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../sp_core/crypto/account_id32.dart' as _i3; + +abstract class ExistenceReason { + const ExistenceReason(); + + factory ExistenceReason.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $ExistenceReasonCodec codec = $ExistenceReasonCodec(); + + static const $ExistenceReason values = $ExistenceReason(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $ExistenceReason { + const $ExistenceReason(); + + Consumer consumer() { + return Consumer(); + } + + Sufficient sufficient() { + return Sufficient(); + } + + DepositHeld depositHeld(BigInt value0) { + return DepositHeld(value0); + } + + DepositRefunded depositRefunded() { + return DepositRefunded(); + } + + DepositFrom depositFrom(_i3.AccountId32 value0, BigInt value1) { + return 
DepositFrom(value0, value1); + } +} + +class $ExistenceReasonCodec with _i1.Codec { + const $ExistenceReasonCodec(); + + @override + ExistenceReason decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return const Consumer(); + case 1: + return const Sufficient(); + case 2: + return DepositHeld._decode(input); + case 3: + return const DepositRefunded(); + case 4: + return DepositFrom._decode(input); + default: + throw Exception('ExistenceReason: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(ExistenceReason value, _i1.Output output) { + switch (value.runtimeType) { + case Consumer: + (value as Consumer).encodeTo(output); + break; + case Sufficient: + (value as Sufficient).encodeTo(output); + break; + case DepositHeld: + (value as DepositHeld).encodeTo(output); + break; + case DepositRefunded: + (value as DepositRefunded).encodeTo(output); + break; + case DepositFrom: + (value as DepositFrom).encodeTo(output); + break; + default: + throw Exception('ExistenceReason: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(ExistenceReason value) { + switch (value.runtimeType) { + case Consumer: + return 1; + case Sufficient: + return 1; + case DepositHeld: + return (value as DepositHeld)._sizeHint(); + case DepositRefunded: + return 1; + case DepositFrom: + return (value as DepositFrom)._sizeHint(); + default: + throw Exception('ExistenceReason: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Consumer extends ExistenceReason { + const Consumer(); + + @override + Map toJson() => {'Consumer': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + } + + @override + bool operator ==(Object other) => other is Consumer; + + @override + int get hashCode => runtimeType.hashCode; +} + +class Sufficient extends ExistenceReason { + const Sufficient(); + + @override + Map toJson() => {'Sufficient': null}; + 
+ void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + } + + @override + bool operator ==(Object other) => other is Sufficient; + + @override + int get hashCode => runtimeType.hashCode; +} + +class DepositHeld extends ExistenceReason { + const DepositHeld(this.value0); + + factory DepositHeld._decode(_i1.Input input) { + return DepositHeld(_i1.U128Codec.codec.decode(input)); + } + + /// Balance + final BigInt value0; + + @override + Map toJson() => {'DepositHeld': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U128Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U128Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is DepositHeld && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class DepositRefunded extends ExistenceReason { + const DepositRefunded(); + + @override + Map toJson() => {'DepositRefunded': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + } + + @override + bool operator ==(Object other) => other is DepositRefunded; + + @override + int get hashCode => runtimeType.hashCode; +} + +class DepositFrom extends ExistenceReason { + const DepositFrom(this.value0, this.value1); + + factory DepositFrom._decode(_i1.Input input) { + return DepositFrom(const _i1.U8ArrayCodec(32).decode(input), _i1.U128Codec.codec.decode(input)); + } + + /// AccountId + final _i3.AccountId32 value0; + + /// Balance + final BigInt value1; + + @override + Map> toJson() => { + 'DepositFrom': [value0.toList(), value1], + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(value0); + size = size + _i1.U128Codec.codec.sizeHint(value1); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const 
_i1.U8ArrayCodec(32).encodeTo(value0, output); + _i1.U128Codec.codec.encodeTo(value1, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is DepositFrom && _i4.listsEqual(other.value0, value0) && other.value1 == value1; + + @override + int get hashCode => Object.hash(value0, value1); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets_holder/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_assets_holder/pallet/error.dart new file mode 100644 index 00000000..36edf4ea --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_assets_holder/pallet/error.dart @@ -0,0 +1,48 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + /// Number of holds on an account would exceed the count of `RuntimeHoldReason`. + tooManyHolds('TooManyHolds', 0); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.tooManyHolds; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_assets_holder/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_assets_holder/pallet/event.dart new file mode 100644 index 00000000..1f214086 --- /dev/null +++ 
b/quantus_sdk/lib/generated/planck/types/pallet_assets_holder/pallet/event.dart @@ -0,0 +1,294 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import '../../quantus_runtime/runtime_hold_reason.dart' as _i4; +import '../../sp_core/crypto/account_id32.dart' as _i3; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + Held held({ + required _i3.AccountId32 who, + required int assetId, + required _i4.RuntimeHoldReason reason, + required BigInt amount, + }) { + return Held(who: who, assetId: assetId, reason: reason, amount: amount); + } + + Released released({ + required _i3.AccountId32 who, + required int assetId, + required _i4.RuntimeHoldReason reason, + required BigInt amount, + }) { + return Released(who: who, assetId: assetId, reason: reason, amount: amount); + } + + Burned burned({ + required _i3.AccountId32 who, + required int assetId, + required _i4.RuntimeHoldReason reason, + required BigInt amount, + }) { + return Burned(who: who, assetId: assetId, reason: reason, amount: amount); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Held._decode(input); + case 1: + return Released._decode(input); + case 2: + return Burned._decode(input); + default: + throw Exception('Event: Invalid variant index: 
"$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case Held: + (value as Held).encodeTo(output); + break; + case Released: + (value as Released).encodeTo(output); + break; + case Burned: + (value as Burned).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case Held: + return (value as Held)._sizeHint(); + case Released: + return (value as Released)._sizeHint(); + case Burned: + return (value as Burned)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// `who`s balance on hold was increased by `amount`. +class Held extends Event { + const Held({required this.who, required this.assetId, required this.reason, required this.amount}); + + factory Held._decode(_i1.Input input) { + return Held( + who: const _i1.U8ArrayCodec(32).decode(input), + assetId: _i1.U32Codec.codec.decode(input), + reason: _i4.RuntimeHoldReason.codec.decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::AssetId + final int assetId; + + /// T::RuntimeHoldReason + final _i4.RuntimeHoldReason reason; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Held': {'who': who.toList(), 'assetId': assetId, 'reason': reason.toJson(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + _i4.RuntimeHoldReason.codec.sizeHint(reason); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + 
_i4.RuntimeHoldReason.codec.encodeTo(reason, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Held && + _i5.listsEqual(other.who, who) && + other.assetId == assetId && + other.reason == reason && + other.amount == amount; + + @override + int get hashCode => Object.hash(who, assetId, reason, amount); +} + +/// `who`s balance on hold was decreased by `amount`. +class Released extends Event { + const Released({required this.who, required this.assetId, required this.reason, required this.amount}); + + factory Released._decode(_i1.Input input) { + return Released( + who: const _i1.U8ArrayCodec(32).decode(input), + assetId: _i1.U32Codec.codec.decode(input), + reason: _i4.RuntimeHoldReason.codec.decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::AssetId + final int assetId; + + /// T::RuntimeHoldReason + final _i4.RuntimeHoldReason reason; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Released': {'who': who.toList(), 'assetId': assetId, 'reason': reason.toJson(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + _i4.RuntimeHoldReason.codec.sizeHint(reason); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + _i4.RuntimeHoldReason.codec.encodeTo(reason, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Released && + _i5.listsEqual(other.who, who) && + other.assetId == assetId && + other.reason == reason && + other.amount == 
amount; + + @override + int get hashCode => Object.hash(who, assetId, reason, amount); +} + +/// `who`s balance on hold was burned by `amount`. +class Burned extends Event { + const Burned({required this.who, required this.assetId, required this.reason, required this.amount}); + + factory Burned._decode(_i1.Input input) { + return Burned( + who: const _i1.U8ArrayCodec(32).decode(input), + assetId: _i1.U32Codec.codec.decode(input), + reason: _i4.RuntimeHoldReason.codec.decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::AssetId + final int assetId; + + /// T::RuntimeHoldReason + final _i4.RuntimeHoldReason reason; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Burned': {'who': who.toList(), 'assetId': assetId, 'reason': reason.toJson(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + _i4.RuntimeHoldReason.codec.sizeHint(reason); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + _i4.RuntimeHoldReason.codec.encodeTo(reason, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Burned && + _i5.listsEqual(other.who, who) && + other.assetId == assetId && + other.reason == reason && + other.amount == amount; + + @override + int get hashCode => Object.hash(who, assetId, reason, amount); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_balances/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_balances/pallet/call.dart new file mode 100644 index 00000000..0190e45b --- /dev/null +++ 
b/quantus_sdk/lib/generated/planck/types/pallet_balances/pallet/call.dart @@ -0,0 +1,598 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i6; + +import '../../sp_core/crypto/account_id32.dart' as _i4; +import '../../sp_runtime/multiaddress/multi_address.dart' as _i3; +import '../types/adjustment_direction.dart' as _i5; + +/// Contains a variant per dispatchable extrinsic that this pallet has. +abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + TransferAllowDeath transferAllowDeath({required _i3.MultiAddress dest, required BigInt value}) { + return TransferAllowDeath(dest: dest, value: value); + } + + ForceTransfer forceTransfer({ + required _i3.MultiAddress source, + required _i3.MultiAddress dest, + required BigInt value, + }) { + return ForceTransfer(source: source, dest: dest, value: value); + } + + TransferKeepAlive transferKeepAlive({required _i3.MultiAddress dest, required BigInt value}) { + return TransferKeepAlive(dest: dest, value: value); + } + + TransferAll transferAll({required _i3.MultiAddress dest, required bool keepAlive}) { + return TransferAll(dest: dest, keepAlive: keepAlive); + } + + ForceUnreserve forceUnreserve({required _i3.MultiAddress who, required BigInt amount}) { + return ForceUnreserve(who: who, amount: amount); + } + + UpgradeAccounts upgradeAccounts({required List<_i4.AccountId32> who}) { + return UpgradeAccounts(who: who); + } + + ForceSetBalance forceSetBalance({required 
_i3.MultiAddress who, required BigInt newFree}) { + return ForceSetBalance(who: who, newFree: newFree); + } + + ForceAdjustTotalIssuance forceAdjustTotalIssuance({ + required _i5.AdjustmentDirection direction, + required BigInt delta, + }) { + return ForceAdjustTotalIssuance(direction: direction, delta: delta); + } + + Burn burn({required BigInt value, required bool keepAlive}) { + return Burn(value: value, keepAlive: keepAlive); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return TransferAllowDeath._decode(input); + case 2: + return ForceTransfer._decode(input); + case 3: + return TransferKeepAlive._decode(input); + case 4: + return TransferAll._decode(input); + case 5: + return ForceUnreserve._decode(input); + case 6: + return UpgradeAccounts._decode(input); + case 8: + return ForceSetBalance._decode(input); + case 9: + return ForceAdjustTotalIssuance._decode(input); + case 10: + return Burn._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case TransferAllowDeath: + (value as TransferAllowDeath).encodeTo(output); + break; + case ForceTransfer: + (value as ForceTransfer).encodeTo(output); + break; + case TransferKeepAlive: + (value as TransferKeepAlive).encodeTo(output); + break; + case TransferAll: + (value as TransferAll).encodeTo(output); + break; + case ForceUnreserve: + (value as ForceUnreserve).encodeTo(output); + break; + case UpgradeAccounts: + (value as UpgradeAccounts).encodeTo(output); + break; + case ForceSetBalance: + (value as ForceSetBalance).encodeTo(output); + break; + case ForceAdjustTotalIssuance: + (value as ForceAdjustTotalIssuance).encodeTo(output); + break; + case Burn: + (value as Burn).encodeTo(output); + break; + default: + throw Exception('Call: 
Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case TransferAllowDeath: + return (value as TransferAllowDeath)._sizeHint(); + case ForceTransfer: + return (value as ForceTransfer)._sizeHint(); + case TransferKeepAlive: + return (value as TransferKeepAlive)._sizeHint(); + case TransferAll: + return (value as TransferAll)._sizeHint(); + case ForceUnreserve: + return (value as ForceUnreserve)._sizeHint(); + case UpgradeAccounts: + return (value as UpgradeAccounts)._sizeHint(); + case ForceSetBalance: + return (value as ForceSetBalance)._sizeHint(); + case ForceAdjustTotalIssuance: + return (value as ForceAdjustTotalIssuance)._sizeHint(); + case Burn: + return (value as Burn)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Transfer some liquid free balance to another account. +/// +/// `transfer_allow_death` will set the `FreeBalance` of the sender and receiver. +/// If the sender's account is below the existential deposit as a result +/// of the transfer, the account will be reaped. +/// +/// The dispatch origin for this call must be `Signed` by the transactor. 
+class TransferAllowDeath extends Call { + const TransferAllowDeath({required this.dest, required this.value}); + + factory TransferAllowDeath._decode(_i1.Input input) { + return TransferAllowDeath( + dest: _i3.MultiAddress.codec.decode(input), + value: _i1.CompactBigIntCodec.codec.decode(input), + ); + } + + /// AccountIdLookupOf + final _i3.MultiAddress dest; + + /// T::Balance + final BigInt value; + + @override + Map> toJson() => { + 'transfer_allow_death': {'dest': dest.toJson(), 'value': value}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(dest); + size = size + _i1.CompactBigIntCodec.codec.sizeHint(value); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.MultiAddress.codec.encodeTo(dest, output); + _i1.CompactBigIntCodec.codec.encodeTo(value, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is TransferAllowDeath && other.dest == dest && other.value == value; + + @override + int get hashCode => Object.hash(dest, value); +} + +/// Exactly as `transfer_allow_death`, except the origin must be root and the source account +/// may be specified. 
+class ForceTransfer extends Call { + const ForceTransfer({required this.source, required this.dest, required this.value}); + + factory ForceTransfer._decode(_i1.Input input) { + return ForceTransfer( + source: _i3.MultiAddress.codec.decode(input), + dest: _i3.MultiAddress.codec.decode(input), + value: _i1.CompactBigIntCodec.codec.decode(input), + ); + } + + /// AccountIdLookupOf + final _i3.MultiAddress source; + + /// AccountIdLookupOf + final _i3.MultiAddress dest; + + /// T::Balance + final BigInt value; + + @override + Map> toJson() => { + 'force_transfer': {'source': source.toJson(), 'dest': dest.toJson(), 'value': value}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(source); + size = size + _i3.MultiAddress.codec.sizeHint(dest); + size = size + _i1.CompactBigIntCodec.codec.sizeHint(value); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i3.MultiAddress.codec.encodeTo(source, output); + _i3.MultiAddress.codec.encodeTo(dest, output); + _i1.CompactBigIntCodec.codec.encodeTo(value, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ForceTransfer && other.source == source && other.dest == dest && other.value == value; + + @override + int get hashCode => Object.hash(source, dest, value); +} + +/// Same as the [`transfer_allow_death`] call, but with a check that the transfer will not +/// kill the origin account. +/// +/// 99% of the time you want [`transfer_allow_death`] instead. 
+/// +/// [`transfer_allow_death`]: struct.Pallet.html#method.transfer +class TransferKeepAlive extends Call { + const TransferKeepAlive({required this.dest, required this.value}); + + factory TransferKeepAlive._decode(_i1.Input input) { + return TransferKeepAlive( + dest: _i3.MultiAddress.codec.decode(input), + value: _i1.CompactBigIntCodec.codec.decode(input), + ); + } + + /// AccountIdLookupOf + final _i3.MultiAddress dest; + + /// T::Balance + final BigInt value; + + @override + Map> toJson() => { + 'transfer_keep_alive': {'dest': dest.toJson(), 'value': value}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(dest); + size = size + _i1.CompactBigIntCodec.codec.sizeHint(value); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i3.MultiAddress.codec.encodeTo(dest, output); + _i1.CompactBigIntCodec.codec.encodeTo(value, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is TransferKeepAlive && other.dest == dest && other.value == value; + + @override + int get hashCode => Object.hash(dest, value); +} + +/// Transfer the entire transferable balance from the caller account. +/// +/// NOTE: This function only attempts to transfer _transferable_ balances. This means that +/// any locked, reserved, or existential deposits (when `keep_alive` is `true`), will not be +/// transferred by this function. To ensure that this function results in a killed account, +/// you might need to prepare the account by removing any reference counters, storage +/// deposits, etc... +/// +/// The dispatch origin of this call must be Signed. +/// +/// - `dest`: The recipient of the transfer. 
+/// - `keep_alive`: A boolean to determine if the `transfer_all` operation should send all +/// of the funds the account has, causing the sender account to be killed (false), or +/// transfer everything except at least the existential deposit, which will guarantee to +/// keep the sender account alive (true). +class TransferAll extends Call { + const TransferAll({required this.dest, required this.keepAlive}); + + factory TransferAll._decode(_i1.Input input) { + return TransferAll(dest: _i3.MultiAddress.codec.decode(input), keepAlive: _i1.BoolCodec.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress dest; + + /// bool + final bool keepAlive; + + @override + Map> toJson() => { + 'transfer_all': {'dest': dest.toJson(), 'keepAlive': keepAlive}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(dest); + size = size + _i1.BoolCodec.codec.sizeHint(keepAlive); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i3.MultiAddress.codec.encodeTo(dest, output); + _i1.BoolCodec.codec.encodeTo(keepAlive, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is TransferAll && other.dest == dest && other.keepAlive == keepAlive; + + @override + int get hashCode => Object.hash(dest, keepAlive); +} + +/// Unreserve some balance from a user by force. +/// +/// Can only be called by ROOT. 
+class ForceUnreserve extends Call { + const ForceUnreserve({required this.who, required this.amount}); + + factory ForceUnreserve._decode(_i1.Input input) { + return ForceUnreserve(who: _i3.MultiAddress.codec.decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'force_unreserve': {'who': who.toJson(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i3.MultiAddress.codec.encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is ForceUnreserve && other.who == who && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Upgrade a specified account. +/// +/// - `origin`: Must be `Signed`. +/// - `who`: The account to be upgraded. +/// +/// This will waive the transaction fee if at least all but 10% of the accounts needed to +/// be upgraded. (We let some not have to be upgraded just in order to allow for the +/// possibility of churn). 
+class UpgradeAccounts extends Call { + const UpgradeAccounts({required this.who}); + + factory UpgradeAccounts._decode(_i1.Input input) { + return UpgradeAccounts(who: const _i1.SequenceCodec<_i4.AccountId32>(_i4.AccountId32Codec()).decode(input)); + } + + /// Vec + final List<_i4.AccountId32> who; + + @override + Map>>> toJson() => { + 'upgrade_accounts': {'who': who.map((value) => value.toList()).toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.SequenceCodec<_i4.AccountId32>(_i4.AccountId32Codec()).sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + const _i1.SequenceCodec<_i4.AccountId32>(_i4.AccountId32Codec()).encodeTo(who, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is UpgradeAccounts && _i6.listsEqual(other.who, who); + + @override + int get hashCode => who.hashCode; +} + +/// Set the regular balance of a given account. +/// +/// The dispatch origin for this call is `root`. 
+class ForceSetBalance extends Call { + const ForceSetBalance({required this.who, required this.newFree}); + + factory ForceSetBalance._decode(_i1.Input input) { + return ForceSetBalance( + who: _i3.MultiAddress.codec.decode(input), + newFree: _i1.CompactBigIntCodec.codec.decode(input), + ); + } + + /// AccountIdLookupOf + final _i3.MultiAddress who; + + /// T::Balance + final BigInt newFree; + + @override + Map> toJson() => { + 'force_set_balance': {'who': who.toJson(), 'newFree': newFree}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(who); + size = size + _i1.CompactBigIntCodec.codec.sizeHint(newFree); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + _i3.MultiAddress.codec.encodeTo(who, output); + _i1.CompactBigIntCodec.codec.encodeTo(newFree, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is ForceSetBalance && other.who == who && other.newFree == newFree; + + @override + int get hashCode => Object.hash(who, newFree); +} + +/// Adjust the total issuance in a saturating way. +/// +/// Can only be called by root and always needs a positive `delta`. 
+/// +/// # Example +class ForceAdjustTotalIssuance extends Call { + const ForceAdjustTotalIssuance({required this.direction, required this.delta}); + + factory ForceAdjustTotalIssuance._decode(_i1.Input input) { + return ForceAdjustTotalIssuance( + direction: _i5.AdjustmentDirection.codec.decode(input), + delta: _i1.CompactBigIntCodec.codec.decode(input), + ); + } + + /// AdjustmentDirection + final _i5.AdjustmentDirection direction; + + /// T::Balance + final BigInt delta; + + @override + Map> toJson() => { + 'force_adjust_total_issuance': {'direction': direction.toJson(), 'delta': delta}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i5.AdjustmentDirection.codec.sizeHint(direction); + size = size + _i1.CompactBigIntCodec.codec.sizeHint(delta); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + _i5.AdjustmentDirection.codec.encodeTo(direction, output); + _i1.CompactBigIntCodec.codec.encodeTo(delta, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ForceAdjustTotalIssuance && other.direction == direction && other.delta == delta; + + @override + int get hashCode => Object.hash(direction, delta); +} + +/// Burn the specified liquid free balance from the origin account. +/// +/// If the origin's account ends up below the existential deposit as a result +/// of the burn and `keep_alive` is false, the account will be reaped. +/// +/// Unlike sending funds to a _burn_ address, which merely makes the funds inaccessible, +/// this `burn` operation will reduce total issuance by the amount _burned_. 
+class Burn extends Call { + const Burn({required this.value, required this.keepAlive}); + + factory Burn._decode(_i1.Input input) { + return Burn(value: _i1.CompactBigIntCodec.codec.decode(input), keepAlive: _i1.BoolCodec.codec.decode(input)); + } + + /// T::Balance + final BigInt value; + + /// bool + final bool keepAlive; + + @override + Map> toJson() => { + 'burn': {'value': value, 'keepAlive': keepAlive}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(value); + size = size + _i1.BoolCodec.codec.sizeHint(keepAlive); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(10, output); + _i1.CompactBigIntCodec.codec.encodeTo(value, output); + _i1.BoolCodec.codec.encodeTo(keepAlive, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Burn && other.value == value && other.keepAlive == keepAlive; + + @override + int get hashCode => Object.hash(value, keepAlive); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_balances/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_balances/pallet/error.dart new file mode 100644 index 00000000..3cc28b4e --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_balances/pallet/error.dart @@ -0,0 +1,103 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + /// Vesting balance too high to send value. + vestingBalance('VestingBalance', 0), + + /// Account liquidity restrictions prevent withdrawal. + liquidityRestrictions('LiquidityRestrictions', 1), + + /// Balance too low to send value. + insufficientBalance('InsufficientBalance', 2), + + /// Value too low to create account due to existential deposit. + existentialDeposit('ExistentialDeposit', 3), + + /// Transfer/payment would kill account. 
+ expendability('Expendability', 4), + + /// A vesting schedule already exists for this account. + existingVestingSchedule('ExistingVestingSchedule', 5), + + /// Beneficiary account must pre-exist. + deadAccount('DeadAccount', 6), + + /// Number of named reserves exceed `MaxReserves`. + tooManyReserves('TooManyReserves', 7), + + /// Number of holds exceed `VariantCountOf`. + tooManyHolds('TooManyHolds', 8), + + /// Number of freezes exceed `MaxFreezes`. + tooManyFreezes('TooManyFreezes', 9), + + /// The issuance cannot be modified since it is already deactivated. + issuanceDeactivated('IssuanceDeactivated', 10), + + /// The delta cannot be zero. + deltaZero('DeltaZero', 11); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.vestingBalance; + case 1: + return Error.liquidityRestrictions; + case 2: + return Error.insufficientBalance; + case 3: + return Error.existentialDeposit; + case 4: + return Error.expendability; + case 5: + return Error.existingVestingSchedule; + case 6: + return Error.deadAccount; + case 7: + return Error.tooManyReserves; + case 8: + return Error.tooManyHolds; + case 9: + return Error.tooManyFreezes; + case 10: + return Error.issuanceDeactivated; + case 11: + return Error.deltaZero; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git 
a/quantus_sdk/lib/generated/planck/types/pallet_balances/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_balances/pallet/event.dart new file mode 100644 index 00000000..590c12c9 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_balances/pallet/event.dart @@ -0,0 +1,1218 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import '../../frame_support/traits/tokens/misc/balance_status.dart' as _i4; +import '../../sp_core/crypto/account_id32.dart' as _i3; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + Endowed endowed({required _i3.AccountId32 account, required BigInt freeBalance}) { + return Endowed(account: account, freeBalance: freeBalance); + } + + DustLost dustLost({required _i3.AccountId32 account, required BigInt amount}) { + return DustLost(account: account, amount: amount); + } + + Transfer transfer({required _i3.AccountId32 from, required _i3.AccountId32 to, required BigInt amount}) { + return Transfer(from: from, to: to, amount: amount); + } + + BalanceSet balanceSet({required _i3.AccountId32 who, required BigInt free}) { + return BalanceSet(who: who, free: free); + } + + Reserved reserved({required _i3.AccountId32 who, required BigInt amount}) { + return Reserved(who: who, amount: amount); + } + + Unreserved unreserved({required _i3.AccountId32 who, required BigInt amount}) { + return Unreserved(who: who, 
amount: amount); + } + + ReserveRepatriated reserveRepatriated({ + required _i3.AccountId32 from, + required _i3.AccountId32 to, + required BigInt amount, + required _i4.BalanceStatus destinationStatus, + }) { + return ReserveRepatriated(from: from, to: to, amount: amount, destinationStatus: destinationStatus); + } + + Deposit deposit({required _i3.AccountId32 who, required BigInt amount}) { + return Deposit(who: who, amount: amount); + } + + Withdraw withdraw({required _i3.AccountId32 who, required BigInt amount}) { + return Withdraw(who: who, amount: amount); + } + + Slashed slashed({required _i3.AccountId32 who, required BigInt amount}) { + return Slashed(who: who, amount: amount); + } + + Minted minted({required _i3.AccountId32 who, required BigInt amount}) { + return Minted(who: who, amount: amount); + } + + Burned burned({required _i3.AccountId32 who, required BigInt amount}) { + return Burned(who: who, amount: amount); + } + + Suspended suspended({required _i3.AccountId32 who, required BigInt amount}) { + return Suspended(who: who, amount: amount); + } + + Restored restored({required _i3.AccountId32 who, required BigInt amount}) { + return Restored(who: who, amount: amount); + } + + Upgraded upgraded({required _i3.AccountId32 who}) { + return Upgraded(who: who); + } + + Issued issued({required BigInt amount}) { + return Issued(amount: amount); + } + + Rescinded rescinded({required BigInt amount}) { + return Rescinded(amount: amount); + } + + Locked locked({required _i3.AccountId32 who, required BigInt amount}) { + return Locked(who: who, amount: amount); + } + + Unlocked unlocked({required _i3.AccountId32 who, required BigInt amount}) { + return Unlocked(who: who, amount: amount); + } + + Frozen frozen({required _i3.AccountId32 who, required BigInt amount}) { + return Frozen(who: who, amount: amount); + } + + Thawed thawed({required _i3.AccountId32 who, required BigInt amount}) { + return Thawed(who: who, amount: amount); + } + + TotalIssuanceForced 
totalIssuanceForced({required BigInt old, required BigInt new_}) { + return TotalIssuanceForced(old: old, new_: new_); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Endowed._decode(input); + case 1: + return DustLost._decode(input); + case 2: + return Transfer._decode(input); + case 3: + return BalanceSet._decode(input); + case 4: + return Reserved._decode(input); + case 5: + return Unreserved._decode(input); + case 6: + return ReserveRepatriated._decode(input); + case 7: + return Deposit._decode(input); + case 8: + return Withdraw._decode(input); + case 9: + return Slashed._decode(input); + case 10: + return Minted._decode(input); + case 11: + return Burned._decode(input); + case 12: + return Suspended._decode(input); + case 13: + return Restored._decode(input); + case 14: + return Upgraded._decode(input); + case 15: + return Issued._decode(input); + case 16: + return Rescinded._decode(input); + case 17: + return Locked._decode(input); + case 18: + return Unlocked._decode(input); + case 19: + return Frozen._decode(input); + case 20: + return Thawed._decode(input); + case 21: + return TotalIssuanceForced._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case Endowed: + (value as Endowed).encodeTo(output); + break; + case DustLost: + (value as DustLost).encodeTo(output); + break; + case Transfer: + (value as Transfer).encodeTo(output); + break; + case BalanceSet: + (value as BalanceSet).encodeTo(output); + break; + case Reserved: + (value as Reserved).encodeTo(output); + break; + case Unreserved: + (value as Unreserved).encodeTo(output); + break; + case ReserveRepatriated: + (value as ReserveRepatriated).encodeTo(output); + break; + case Deposit: + (value as 
Deposit).encodeTo(output); + break; + case Withdraw: + (value as Withdraw).encodeTo(output); + break; + case Slashed: + (value as Slashed).encodeTo(output); + break; + case Minted: + (value as Minted).encodeTo(output); + break; + case Burned: + (value as Burned).encodeTo(output); + break; + case Suspended: + (value as Suspended).encodeTo(output); + break; + case Restored: + (value as Restored).encodeTo(output); + break; + case Upgraded: + (value as Upgraded).encodeTo(output); + break; + case Issued: + (value as Issued).encodeTo(output); + break; + case Rescinded: + (value as Rescinded).encodeTo(output); + break; + case Locked: + (value as Locked).encodeTo(output); + break; + case Unlocked: + (value as Unlocked).encodeTo(output); + break; + case Frozen: + (value as Frozen).encodeTo(output); + break; + case Thawed: + (value as Thawed).encodeTo(output); + break; + case TotalIssuanceForced: + (value as TotalIssuanceForced).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case Endowed: + return (value as Endowed)._sizeHint(); + case DustLost: + return (value as DustLost)._sizeHint(); + case Transfer: + return (value as Transfer)._sizeHint(); + case BalanceSet: + return (value as BalanceSet)._sizeHint(); + case Reserved: + return (value as Reserved)._sizeHint(); + case Unreserved: + return (value as Unreserved)._sizeHint(); + case ReserveRepatriated: + return (value as ReserveRepatriated)._sizeHint(); + case Deposit: + return (value as Deposit)._sizeHint(); + case Withdraw: + return (value as Withdraw)._sizeHint(); + case Slashed: + return (value as Slashed)._sizeHint(); + case Minted: + return (value as Minted)._sizeHint(); + case Burned: + return (value as Burned)._sizeHint(); + case Suspended: + return (value as Suspended)._sizeHint(); + case Restored: + return (value as Restored)._sizeHint(); + case Upgraded: + 
return (value as Upgraded)._sizeHint(); + case Issued: + return (value as Issued)._sizeHint(); + case Rescinded: + return (value as Rescinded)._sizeHint(); + case Locked: + return (value as Locked)._sizeHint(); + case Unlocked: + return (value as Unlocked)._sizeHint(); + case Frozen: + return (value as Frozen)._sizeHint(); + case Thawed: + return (value as Thawed)._sizeHint(); + case TotalIssuanceForced: + return (value as TotalIssuanceForced)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// An account was created with some free balance. +class Endowed extends Event { + const Endowed({required this.account, required this.freeBalance}); + + factory Endowed._decode(_i1.Input input) { + return Endowed(account: const _i1.U8ArrayCodec(32).decode(input), freeBalance: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 account; + + /// T::Balance + final BigInt freeBalance; + + @override + Map> toJson() => { + 'Endowed': {'account': account.toList(), 'freeBalance': freeBalance}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(account); + size = size + _i1.U128Codec.codec.sizeHint(freeBalance); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(account, output); + _i1.U128Codec.codec.encodeTo(freeBalance, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Endowed && _i5.listsEqual(other.account, account) && other.freeBalance == freeBalance; + + @override + int get hashCode => Object.hash(account, freeBalance); +} + +/// An account was removed whose balance was non-zero but below ExistentialDeposit, +/// resulting in an outright loss. 
+class DustLost extends Event { + const DustLost({required this.account, required this.amount}); + + factory DustLost._decode(_i1.Input input) { + return DustLost(account: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 account; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'DustLost': {'account': account.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(account); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(account, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is DustLost && _i5.listsEqual(other.account, account) && other.amount == amount; + + @override + int get hashCode => Object.hash(account, amount); +} + +/// Transfer succeeded. 
+class Transfer extends Event { + const Transfer({required this.from, required this.to, required this.amount}); + + factory Transfer._decode(_i1.Input input) { + return Transfer( + from: const _i1.U8ArrayCodec(32).decode(input), + to: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 from; + + /// T::AccountId + final _i3.AccountId32 to; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Transfer': {'from': from.toList(), 'to': to.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(from); + size = size + const _i3.AccountId32Codec().sizeHint(to); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(from, output); + const _i1.U8ArrayCodec(32).encodeTo(to, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Transfer && _i5.listsEqual(other.from, from) && _i5.listsEqual(other.to, to) && other.amount == amount; + + @override + int get hashCode => Object.hash(from, to, amount); +} + +/// A balance was set by root. 
+class BalanceSet extends Event { + const BalanceSet({required this.who, required this.free}); + + factory BalanceSet._decode(_i1.Input input) { + return BalanceSet(who: const _i1.U8ArrayCodec(32).decode(input), free: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt free; + + @override + Map> toJson() => { + 'BalanceSet': {'who': who.toList(), 'free': free}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(free); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(free, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is BalanceSet && _i5.listsEqual(other.who, who) && other.free == free; + + @override + int get hashCode => Object.hash(who, free); +} + +/// Some balance was reserved (moved from free to reserved). 
+class Reserved extends Event { + const Reserved({required this.who, required this.amount}); + + factory Reserved._decode(_i1.Input input) { + return Reserved(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Reserved': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Reserved && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Some balance was unreserved (moved from reserved to free). 
+class Unreserved extends Event { + const Unreserved({required this.who, required this.amount}); + + factory Unreserved._decode(_i1.Input input) { + return Unreserved(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Unreserved': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Unreserved && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Some balance was moved from the reserve of the first account to the second account. +/// Final argument indicates the destination balance type. 
+class ReserveRepatriated extends Event { + const ReserveRepatriated({ + required this.from, + required this.to, + required this.amount, + required this.destinationStatus, + }); + + factory ReserveRepatriated._decode(_i1.Input input) { + return ReserveRepatriated( + from: const _i1.U8ArrayCodec(32).decode(input), + to: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + destinationStatus: _i4.BalanceStatus.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 from; + + /// T::AccountId + final _i3.AccountId32 to; + + /// T::Balance + final BigInt amount; + + /// Status + final _i4.BalanceStatus destinationStatus; + + @override + Map> toJson() => { + 'ReserveRepatriated': { + 'from': from.toList(), + 'to': to.toList(), + 'amount': amount, + 'destinationStatus': destinationStatus.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(from); + size = size + const _i3.AccountId32Codec().sizeHint(to); + size = size + _i1.U128Codec.codec.sizeHint(amount); + size = size + _i4.BalanceStatus.codec.sizeHint(destinationStatus); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + const _i1.U8ArrayCodec(32).encodeTo(from, output); + const _i1.U8ArrayCodec(32).encodeTo(to, output); + _i1.U128Codec.codec.encodeTo(amount, output); + _i4.BalanceStatus.codec.encodeTo(destinationStatus, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ReserveRepatriated && + _i5.listsEqual(other.from, from) && + _i5.listsEqual(other.to, to) && + other.amount == amount && + other.destinationStatus == destinationStatus; + + @override + int get hashCode => Object.hash(from, to, amount, destinationStatus); +} + +/// Some amount was deposited (e.g. for transaction fees). 
+class Deposit extends Event { + const Deposit({required this.who, required this.amount}); + + factory Deposit._decode(_i1.Input input) { + return Deposit(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Deposit': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Deposit && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Some amount was withdrawn from the account (e.g. for transaction fees). 
+class Withdraw extends Event { + const Withdraw({required this.who, required this.amount}); + + factory Withdraw._decode(_i1.Input input) { + return Withdraw(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Withdraw': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Withdraw && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Some amount was removed from the account (e.g. for misbehavior). 
+class Slashed extends Event { + const Slashed({required this.who, required this.amount}); + + factory Slashed._decode(_i1.Input input) { + return Slashed(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Slashed': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Slashed && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Some amount was minted into an account. 
+class Minted extends Event { + const Minted({required this.who, required this.amount}); + + factory Minted._decode(_i1.Input input) { + return Minted(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Minted': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(10, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Minted && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Some amount was burned from an account. 
+class Burned extends Event { + const Burned({required this.who, required this.amount}); + + factory Burned._decode(_i1.Input input) { + return Burned(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Burned': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(11, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Burned && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Some amount was suspended from an account (it can be restored later). 
+class Suspended extends Event { + const Suspended({required this.who, required this.amount}); + + factory Suspended._decode(_i1.Input input) { + return Suspended(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Suspended': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(12, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Suspended && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Some amount was restored into an account. 
+class Restored extends Event { + const Restored({required this.who, required this.amount}); + + factory Restored._decode(_i1.Input input) { + return Restored(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Restored': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(13, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Restored && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// An account was upgraded. +class Upgraded extends Event { + const Upgraded({required this.who}); + + factory Upgraded._decode(_i1.Input input) { + return Upgraded(who: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + @override + Map>> toJson() => { + 'Upgraded': {'who': who.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(14, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Upgraded && _i5.listsEqual(other.who, who); + + @override + int get hashCode => who.hashCode; +} + +/// Total issuance was increased by `amount`, creating a credit to be balanced. 
+class Issued extends Event { + const Issued({required this.amount}); + + factory Issued._decode(_i1.Input input) { + return Issued(amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Issued': {'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(15, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Issued && other.amount == amount; + + @override + int get hashCode => amount.hashCode; +} + +/// Total issuance was decreased by `amount`, creating a debt to be balanced. +class Rescinded extends Event { + const Rescinded({required this.amount}); + + factory Rescinded._decode(_i1.Input input) { + return Rescinded(amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Rescinded': {'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(16, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Rescinded && other.amount == amount; + + @override + int get hashCode => amount.hashCode; +} + +/// Some balance was locked. 
+class Locked extends Event { + const Locked({required this.who, required this.amount}); + + factory Locked._decode(_i1.Input input) { + return Locked(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Locked': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(17, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Locked && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Some balance was unlocked. 
+class Unlocked extends Event { + const Unlocked({required this.who, required this.amount}); + + factory Unlocked._decode(_i1.Input input) { + return Unlocked(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Unlocked': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(18, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Unlocked && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Some balance was frozen. 
+class Frozen extends Event { + const Frozen({required this.who, required this.amount}); + + factory Frozen._decode(_i1.Input input) { + return Frozen(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Frozen': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(19, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Frozen && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// Some balance was thawed. 
+class Thawed extends Event { + const Thawed({required this.who, required this.amount}); + + factory Thawed._decode(_i1.Input input) { + return Thawed(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Balance + final BigInt amount; + + @override + Map> toJson() => { + 'Thawed': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(20, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Thawed && _i5.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// The `TotalIssuance` was forcefully changed. 
+class TotalIssuanceForced extends Event { + const TotalIssuanceForced({required this.old, required this.new_}); + + factory TotalIssuanceForced._decode(_i1.Input input) { + return TotalIssuanceForced(old: _i1.U128Codec.codec.decode(input), new_: _i1.U128Codec.codec.decode(input)); + } + + /// T::Balance + final BigInt old; + + /// T::Balance + final BigInt new_; + + @override + Map> toJson() => { + 'TotalIssuanceForced': {'old': old, 'new': new_}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U128Codec.codec.sizeHint(old); + size = size + _i1.U128Codec.codec.sizeHint(new_); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(21, output); + _i1.U128Codec.codec.encodeTo(old, output); + _i1.U128Codec.codec.encodeTo(new_, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is TotalIssuanceForced && other.old == old && other.new_ == new_; + + @override + int get hashCode => Object.hash(old, new_); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_balances/types/account_data.dart b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/account_data.dart new file mode 100644 index 00000000..751ca8ed --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/account_data.dart @@ -0,0 +1,78 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import 'extra_flags.dart' as _i2; + +class AccountData { + const AccountData({required this.free, required this.reserved, required this.frozen, required this.flags}); + + factory AccountData.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Balance + final BigInt free; + + /// Balance + final BigInt reserved; + + /// Balance + final BigInt frozen; + + /// ExtraFlags + final _i2.ExtraFlags flags; + + static const $AccountDataCodec codec = $AccountDataCodec(); + + _i3.Uint8List 
encode() { + return codec.encode(this); + } + + Map toJson() => {'free': free, 'reserved': reserved, 'frozen': frozen, 'flags': flags}; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is AccountData && + other.free == free && + other.reserved == reserved && + other.frozen == frozen && + other.flags == flags; + + @override + int get hashCode => Object.hash(free, reserved, frozen, flags); +} + +class $AccountDataCodec with _i1.Codec { + const $AccountDataCodec(); + + @override + void encodeTo(AccountData obj, _i1.Output output) { + _i1.U128Codec.codec.encodeTo(obj.free, output); + _i1.U128Codec.codec.encodeTo(obj.reserved, output); + _i1.U128Codec.codec.encodeTo(obj.frozen, output); + _i1.U128Codec.codec.encodeTo(obj.flags, output); + } + + @override + AccountData decode(_i1.Input input) { + return AccountData( + free: _i1.U128Codec.codec.decode(input), + reserved: _i1.U128Codec.codec.decode(input), + frozen: _i1.U128Codec.codec.decode(input), + flags: _i1.U128Codec.codec.decode(input), + ); + } + + @override + int sizeHint(AccountData obj) { + int size = 0; + size = size + _i1.U128Codec.codec.sizeHint(obj.free); + size = size + _i1.U128Codec.codec.sizeHint(obj.reserved); + size = size + _i1.U128Codec.codec.sizeHint(obj.frozen); + size = size + const _i2.ExtraFlagsCodec().sizeHint(obj.flags); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_balances/types/adjustment_direction.dart b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/adjustment_direction.dart new file mode 100644 index 00000000..8e87929c --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/adjustment_direction.dart @@ -0,0 +1,49 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum AdjustmentDirection { + increase('Increase', 0), + decrease('Decrease', 1); + + const 
AdjustmentDirection(this.variantName, this.codecIndex); + + factory AdjustmentDirection.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $AdjustmentDirectionCodec codec = $AdjustmentDirectionCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $AdjustmentDirectionCodec with _i1.Codec { + const $AdjustmentDirectionCodec(); + + @override + AdjustmentDirection decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return AdjustmentDirection.increase; + case 1: + return AdjustmentDirection.decrease; + default: + throw Exception('AdjustmentDirection: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(AdjustmentDirection value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_balances/types/balance_lock.dart b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/balance_lock.dart new file mode 100644 index 00000000..9f9f90bc --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/balance_lock.dart @@ -0,0 +1,69 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import 'reasons.dart' as _i2; + +class BalanceLock { + const BalanceLock({required this.id, required this.amount, required this.reasons}); + + factory BalanceLock.decode(_i1.Input input) { + return codec.decode(input); + } + + /// LockIdentifier + final List id; + + /// Balance + final BigInt amount; + + /// Reasons + final _i2.Reasons reasons; + + static const $BalanceLockCodec codec = $BalanceLockCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'id': id.toList(), 
'amount': amount, 'reasons': reasons.toJson()}; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is BalanceLock && _i4.listsEqual(other.id, id) && other.amount == amount && other.reasons == reasons; + + @override + int get hashCode => Object.hash(id, amount, reasons); +} + +class $BalanceLockCodec with _i1.Codec { + const $BalanceLockCodec(); + + @override + void encodeTo(BalanceLock obj, _i1.Output output) { + const _i1.U8ArrayCodec(8).encodeTo(obj.id, output); + _i1.U128Codec.codec.encodeTo(obj.amount, output); + _i2.Reasons.codec.encodeTo(obj.reasons, output); + } + + @override + BalanceLock decode(_i1.Input input) { + return BalanceLock( + id: const _i1.U8ArrayCodec(8).decode(input), + amount: _i1.U128Codec.codec.decode(input), + reasons: _i2.Reasons.codec.decode(input), + ); + } + + @override + int sizeHint(BalanceLock obj) { + int size = 0; + size = size + const _i1.U8ArrayCodec(8).sizeHint(obj.id); + size = size + _i1.U128Codec.codec.sizeHint(obj.amount); + size = size + _i2.Reasons.codec.sizeHint(obj.reasons); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_balances/types/extra_flags.dart b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/extra_flags.dart new file mode 100644 index 00000000..275f36a9 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/extra_flags.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef ExtraFlags = BigInt; + +class ExtraFlagsCodec with _i1.Codec { + const ExtraFlagsCodec(); + + @override + ExtraFlags decode(_i1.Input input) { + return _i1.U128Codec.codec.decode(input); + } + + @override + void encodeTo(ExtraFlags value, _i1.Output output) { + _i1.U128Codec.codec.encodeTo(value, output); + } + + @override + int sizeHint(ExtraFlags value) { + return _i1.U128Codec.codec.sizeHint(value); + } +} diff --git 
a/quantus_sdk/lib/generated/planck/types/pallet_balances/types/reasons.dart b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/reasons.dart new file mode 100644 index 00000000..6fbab947 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/reasons.dart @@ -0,0 +1,52 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum Reasons { + fee('Fee', 0), + misc('Misc', 1), + all('All', 2); + + const Reasons(this.variantName, this.codecIndex); + + factory Reasons.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ReasonsCodec codec = $ReasonsCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ReasonsCodec with _i1.Codec { + const $ReasonsCodec(); + + @override + Reasons decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Reasons.fee; + case 1: + return Reasons.misc; + case 2: + return Reasons.all; + default: + throw Exception('Reasons: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Reasons value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_balances/types/reserve_data.dart b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/reserve_data.dart new file mode 100644 index 00000000..c1436d9a --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_balances/types/reserve_data.dart @@ -0,0 +1,57 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i3; + +class ReserveData { + const ReserveData({required this.id, required this.amount}); + + 
factory ReserveData.decode(_i1.Input input) { + return codec.decode(input); + } + + /// ReserveIdentifier + final List id; + + /// Balance + final BigInt amount; + + static const $ReserveDataCodec codec = $ReserveDataCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'id': id.toList(), 'amount': amount}; + + @override + bool operator ==(Object other) => + identical(this, other) || other is ReserveData && _i3.listsEqual(other.id, id) && other.amount == amount; + + @override + int get hashCode => Object.hash(id, amount); +} + +class $ReserveDataCodec with _i1.Codec { + const $ReserveDataCodec(); + + @override + void encodeTo(ReserveData obj, _i1.Output output) { + const _i1.U8ArrayCodec(8).encodeTo(obj.id, output); + _i1.U128Codec.codec.encodeTo(obj.amount, output); + } + + @override + ReserveData decode(_i1.Input input) { + return ReserveData(id: const _i1.U8ArrayCodec(8).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + @override + int sizeHint(ReserveData obj) { + int size = 0; + size = size + const _i1.U8ArrayCodec(8).sizeHint(obj.id); + size = size + _i1.U128Codec.codec.sizeHint(obj.amount); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/conviction/conviction.dart b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/conviction/conviction.dart new file mode 100644 index 00000000..f4ed4eeb --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/conviction/conviction.dart @@ -0,0 +1,64 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum Conviction { + none('None', 0), + locked1x('Locked1x', 1), + locked2x('Locked2x', 2), + locked3x('Locked3x', 3), + locked4x('Locked4x', 4), + locked5x('Locked5x', 5), + locked6x('Locked6x', 6); + + const Conviction(this.variantName, this.codecIndex); + + factory 
Conviction.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ConvictionCodec codec = $ConvictionCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ConvictionCodec with _i1.Codec { + const $ConvictionCodec(); + + @override + Conviction decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Conviction.none; + case 1: + return Conviction.locked1x; + case 2: + return Conviction.locked2x; + case 3: + return Conviction.locked3x; + case 4: + return Conviction.locked4x; + case 5: + return Conviction.locked5x; + case 6: + return Conviction.locked6x; + default: + throw Exception('Conviction: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Conviction value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/pallet/call.dart new file mode 100644 index 00000000..45074a76 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/pallet/call.dart @@ -0,0 +1,498 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../sp_runtime/multiaddress/multi_address.dart' as _i4; +import '../conviction/conviction.dart' as _i5; +import '../vote/account_vote.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. 
+abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + Vote vote({required BigInt pollIndex, required _i3.AccountVote vote}) { + return Vote(pollIndex: pollIndex, vote: vote); + } + + Delegate delegate({ + required int class_, + required _i4.MultiAddress to, + required _i5.Conviction conviction, + required BigInt balance, + }) { + return Delegate(class_: class_, to: to, conviction: conviction, balance: balance); + } + + Undelegate undelegate({required int class_}) { + return Undelegate(class_: class_); + } + + Unlock unlock({required int class_, required _i4.MultiAddress target}) { + return Unlock(class_: class_, target: target); + } + + RemoveVote removeVote({int? 
class_, required int index}) { + return RemoveVote(class_: class_, index: index); + } + + RemoveOtherVote removeOtherVote({required _i4.MultiAddress target, required int class_, required int index}) { + return RemoveOtherVote(target: target, class_: class_, index: index); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Vote._decode(input); + case 1: + return Delegate._decode(input); + case 2: + return Undelegate._decode(input); + case 3: + return Unlock._decode(input); + case 4: + return RemoveVote._decode(input); + case 5: + return RemoveOtherVote._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case Vote: + (value as Vote).encodeTo(output); + break; + case Delegate: + (value as Delegate).encodeTo(output); + break; + case Undelegate: + (value as Undelegate).encodeTo(output); + break; + case Unlock: + (value as Unlock).encodeTo(output); + break; + case RemoveVote: + (value as RemoveVote).encodeTo(output); + break; + case RemoveOtherVote: + (value as RemoveOtherVote).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case Vote: + return (value as Vote)._sizeHint(); + case Delegate: + return (value as Delegate)._sizeHint(); + case Undelegate: + return (value as Undelegate)._sizeHint(); + case Unlock: + return (value as Unlock)._sizeHint(); + case RemoveVote: + return (value as RemoveVote)._sizeHint(); + case RemoveOtherVote: + return (value as RemoveOtherVote)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Vote in a poll. 
If `vote.is_aye()`, the vote is to enact the proposal; +/// otherwise it is a vote to keep the status quo. +/// +/// The dispatch origin of this call must be _Signed_. +/// +/// - `poll_index`: The index of the poll to vote for. +/// - `vote`: The vote configuration. +/// +/// Weight: `O(R)` where R is the number of polls the voter has voted on. +class Vote extends Call { + const Vote({required this.pollIndex, required this.vote}); + + factory Vote._decode(_i1.Input input) { + return Vote(pollIndex: _i1.CompactBigIntCodec.codec.decode(input), vote: _i3.AccountVote.codec.decode(input)); + } + + /// PollIndexOf + final BigInt pollIndex; + + /// AccountVote> + final _i3.AccountVote vote; + + @override + Map> toJson() => { + 'vote': {'pollIndex': pollIndex, 'vote': vote.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(pollIndex); + size = size + _i3.AccountVote.codec.sizeHint(vote); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.CompactBigIntCodec.codec.encodeTo(pollIndex, output); + _i3.AccountVote.codec.encodeTo(vote, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Vote && other.pollIndex == pollIndex && other.vote == vote; + + @override + int get hashCode => Object.hash(pollIndex, vote); +} + +/// Delegate the voting power (with some given conviction) of the sending account for a +/// particular class of polls. +/// +/// The balance delegated is locked for as long as it's delegated, and thereafter for the +/// time appropriate for the conviction's lock period. +/// +/// The dispatch origin of this call must be _Signed_, and the signing account must either: +/// - be delegating already; or +/// - have no voting activity (if there is, then it will need to be removed through +/// `remove_vote`). +/// +/// - `to`: The account whose voting the `target` account's voting power will follow. 
+/// - `class`: The class of polls to delegate. To delegate multiple classes, multiple calls +/// to this function are required. +/// - `conviction`: The conviction that will be attached to the delegated votes. When the +/// account is undelegated, the funds will be locked for the corresponding period. +/// - `balance`: The amount of the account's balance to be used in delegating. This must not +/// be more than the account's current balance. +/// +/// Emits `Delegated`. +/// +/// Weight: `O(R)` where R is the number of polls the voter delegating to has +/// voted on. Weight is initially charged as if maximum votes, but is refunded later. +class Delegate extends Call { + const Delegate({required this.class_, required this.to, required this.conviction, required this.balance}); + + factory Delegate._decode(_i1.Input input) { + return Delegate( + class_: _i1.U16Codec.codec.decode(input), + to: _i4.MultiAddress.codec.decode(input), + conviction: _i5.Conviction.codec.decode(input), + balance: _i1.U128Codec.codec.decode(input), + ); + } + + /// ClassOf + final int class_; + + /// AccountIdLookupOf + final _i4.MultiAddress to; + + /// Conviction + final _i5.Conviction conviction; + + /// BalanceOf + final BigInt balance; + + @override + Map> toJson() => { + 'delegate': {'class': class_, 'to': to.toJson(), 'conviction': conviction.toJson(), 'balance': balance}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U16Codec.codec.sizeHint(class_); + size = size + _i4.MultiAddress.codec.sizeHint(to); + size = size + _i5.Conviction.codec.sizeHint(conviction); + size = size + _i1.U128Codec.codec.sizeHint(balance); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U16Codec.codec.encodeTo(class_, output); + _i4.MultiAddress.codec.encodeTo(to, output); + _i5.Conviction.codec.encodeTo(conviction, output); + _i1.U128Codec.codec.encodeTo(balance, output); + } + + @override + bool operator ==(Object other) => + 
identical(this, other) || + other is Delegate && + other.class_ == class_ && + other.to == to && + other.conviction == conviction && + other.balance == balance; + + @override + int get hashCode => Object.hash(class_, to, conviction, balance); +} + +/// Undelegate the voting power of the sending account for a particular class of polls. +/// +/// Tokens may be unlocked following once an amount of time consistent with the lock period +/// of the conviction with which the delegation was issued has passed. +/// +/// The dispatch origin of this call must be _Signed_ and the signing account must be +/// currently delegating. +/// +/// - `class`: The class of polls to remove the delegation from. +/// +/// Emits `Undelegated`. +/// +/// Weight: `O(R)` where R is the number of polls the voter delegating to has +/// voted on. Weight is initially charged as if maximum votes, but is refunded later. +class Undelegate extends Call { + const Undelegate({required this.class_}); + + factory Undelegate._decode(_i1.Input input) { + return Undelegate(class_: _i1.U16Codec.codec.decode(input)); + } + + /// ClassOf + final int class_; + + @override + Map> toJson() => { + 'undelegate': {'class': class_}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U16Codec.codec.sizeHint(class_); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U16Codec.codec.encodeTo(class_, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Undelegate && other.class_ == class_; + + @override + int get hashCode => class_.hashCode; +} + +/// Remove the lock caused by prior voting/delegating which has expired within a particular +/// class. +/// +/// The dispatch origin of this call must be _Signed_. +/// +/// - `class`: The class of polls to unlock. +/// - `target`: The account to remove the lock on. +/// +/// Weight: `O(R)` with R number of vote of target. 
+class Unlock extends Call { + const Unlock({required this.class_, required this.target}); + + factory Unlock._decode(_i1.Input input) { + return Unlock(class_: _i1.U16Codec.codec.decode(input), target: _i4.MultiAddress.codec.decode(input)); + } + + /// ClassOf + final int class_; + + /// AccountIdLookupOf + final _i4.MultiAddress target; + + @override + Map> toJson() => { + 'unlock': {'class': class_, 'target': target.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U16Codec.codec.sizeHint(class_); + size = size + _i4.MultiAddress.codec.sizeHint(target); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i1.U16Codec.codec.encodeTo(class_, output); + _i4.MultiAddress.codec.encodeTo(target, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Unlock && other.class_ == class_ && other.target == target; + + @override + int get hashCode => Object.hash(class_, target); +} + +/// Remove a vote for a poll. +/// +/// If: +/// - the poll was cancelled, or +/// - the poll is ongoing, or +/// - the poll has ended such that +/// - the vote of the account was in opposition to the result; or +/// - there was no conviction to the account's vote; or +/// - the account made a split vote +/// ...then the vote is removed cleanly and a following call to `unlock` may result in more +/// funds being available. +/// +/// If, however, the poll has ended and: +/// - it finished corresponding to the vote of the account, and +/// - the account made a standard vote with conviction, and +/// - the lock period of the conviction is not over +/// ...then the lock will be aggregated into the overall account's lock, which may involve +/// *overlocking* (where the two locks are combined into a single lock that is the maximum +/// of both the amount locked and the time is it locked for). 
+/// +/// The dispatch origin of this call must be _Signed_, and the signer must have a vote +/// registered for poll `index`. +/// +/// - `index`: The index of poll of the vote to be removed. +/// - `class`: Optional parameter, if given it indicates the class of the poll. For polls +/// which have finished or are cancelled, this must be `Some`. +/// +/// Weight: `O(R + log R)` where R is the number of polls that `target` has voted on. +/// Weight is calculated for the maximum number of vote. +class RemoveVote extends Call { + const RemoveVote({this.class_, required this.index}); + + factory RemoveVote._decode(_i1.Input input) { + return RemoveVote( + class_: const _i1.OptionCodec(_i1.U16Codec.codec).decode(input), + index: _i1.U32Codec.codec.decode(input), + ); + } + + /// Option> + final int? class_; + + /// PollIndexOf + final int index; + + @override + Map> toJson() => { + 'remove_vote': {'class': class_, 'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.OptionCodec(_i1.U16Codec.codec).sizeHint(class_); + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.OptionCodec(_i1.U16Codec.codec).encodeTo(class_, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is RemoveVote && other.class_ == class_ && other.index == index; + + @override + int get hashCode => Object.hash(class_, index); +} + +/// Remove a vote for a poll. +/// +/// If the `target` is equal to the signer, then this function is exactly equivalent to +/// `remove_vote`. If not equal to the signer, then the vote must have expired, +/// either because the poll was cancelled, because the voter lost the poll or +/// because the conviction period is over. +/// +/// The dispatch origin of this call must be _Signed_. 
+/// +/// - `target`: The account of the vote to be removed; this account must have voted for poll +/// `index`. +/// - `index`: The index of poll of the vote to be removed. +/// - `class`: The class of the poll. +/// +/// Weight: `O(R + log R)` where R is the number of polls that `target` has voted on. +/// Weight is calculated for the maximum number of vote. +class RemoveOtherVote extends Call { + const RemoveOtherVote({required this.target, required this.class_, required this.index}); + + factory RemoveOtherVote._decode(_i1.Input input) { + return RemoveOtherVote( + target: _i4.MultiAddress.codec.decode(input), + class_: _i1.U16Codec.codec.decode(input), + index: _i1.U32Codec.codec.decode(input), + ); + } + + /// AccountIdLookupOf + final _i4.MultiAddress target; + + /// ClassOf + final int class_; + + /// PollIndexOf + final int index; + + @override + Map> toJson() => { + 'remove_other_vote': {'target': target.toJson(), 'class': class_, 'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i4.MultiAddress.codec.sizeHint(target); + size = size + _i1.U16Codec.codec.sizeHint(class_); + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i4.MultiAddress.codec.encodeTo(target, output); + _i1.U16Codec.codec.encodeTo(class_, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is RemoveOtherVote && other.target == target && other.class_ == class_ && other.index == index; + + @override + int get hashCode => Object.hash(target, class_, index); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/pallet/error.dart new file mode 100644 index 00000000..783ece3d --- /dev/null +++ 
b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/pallet/error.dart @@ -0,0 +1,104 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + /// Poll is not ongoing. + notOngoing('NotOngoing', 0), + + /// The given account did not vote on the poll. + notVoter('NotVoter', 1), + + /// The actor has no permission to conduct the action. + noPermission('NoPermission', 2), + + /// The actor has no permission to conduct the action right now but will do in the future. + noPermissionYet('NoPermissionYet', 3), + + /// The account is already delegating. + alreadyDelegating('AlreadyDelegating', 4), + + /// The account currently has votes attached to it and the operation cannot succeed until + /// these are removed through `remove_vote`. + alreadyVoting('AlreadyVoting', 5), + + /// Too high a balance was provided that the account cannot afford. + insufficientFunds('InsufficientFunds', 6), + + /// The account is not currently delegating. + notDelegating('NotDelegating', 7), + + /// Delegation to oneself makes no sense. + nonsense('Nonsense', 8), + + /// Maximum number of votes reached. + maxVotesReached('MaxVotesReached', 9), + + /// The class must be supplied since it is not easily determinable from the state. + classNeeded('ClassNeeded', 10), + + /// The class ID supplied is invalid. 
+ badClass('BadClass', 11); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.notOngoing; + case 1: + return Error.notVoter; + case 2: + return Error.noPermission; + case 3: + return Error.noPermissionYet; + case 4: + return Error.alreadyDelegating; + case 5: + return Error.alreadyVoting; + case 6: + return Error.insufficientFunds; + case 7: + return Error.notDelegating; + case 8: + return Error.nonsense; + case 9: + return Error.maxVotesReached; + case 10: + return Error.classNeeded; + case 11: + return Error.badClass; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/pallet/event.dart new file mode 100644 index 00000000..3de84afc --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/pallet/event.dart @@ -0,0 +1,315 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import '../../sp_core/crypto/account_id32.dart' as _i3; +import '../vote/account_vote.dart' as _i4; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { 
+ return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $Event { + const $Event(); + + Delegated delegated(_i3.AccountId32 value0, _i3.AccountId32 value1) { + return Delegated(value0, value1); + } + + Undelegated undelegated(_i3.AccountId32 value0) { + return Undelegated(value0); + } + + Voted voted({required _i3.AccountId32 who, required _i4.AccountVote vote}) { + return Voted(who: who, vote: vote); + } + + VoteRemoved voteRemoved({required _i3.AccountId32 who, required _i4.AccountVote vote}) { + return VoteRemoved(who: who, vote: vote); + } + + VoteUnlocked voteUnlocked({required _i3.AccountId32 who, required int class_}) { + return VoteUnlocked(who: who, class_: class_); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Delegated._decode(input); + case 1: + return Undelegated._decode(input); + case 2: + return Voted._decode(input); + case 3: + return VoteRemoved._decode(input); + case 4: + return VoteUnlocked._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case Delegated: + (value as Delegated).encodeTo(output); + break; + case Undelegated: + (value as Undelegated).encodeTo(output); + break; + case Voted: + (value as Voted).encodeTo(output); + break; + case VoteRemoved: + (value as VoteRemoved).encodeTo(output); + break; + case VoteUnlocked: + (value as VoteUnlocked).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type 
"${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case Delegated: + return (value as Delegated)._sizeHint(); + case Undelegated: + return (value as Undelegated)._sizeHint(); + case Voted: + return (value as Voted)._sizeHint(); + case VoteRemoved: + return (value as VoteRemoved)._sizeHint(); + case VoteUnlocked: + return (value as VoteUnlocked)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// An account has delegated their vote to another account. \[who, target\] +class Delegated extends Event { + const Delegated(this.value0, this.value1); + + factory Delegated._decode(_i1.Input input) { + return Delegated(const _i1.U8ArrayCodec(32).decode(input), const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 value0; + + /// T::AccountId + final _i3.AccountId32 value1; + + @override + Map>> toJson() => { + 'Delegated': [value0.toList(), value1.toList()], + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(value0); + size = size + const _i3.AccountId32Codec().sizeHint(value1); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(value0, output); + const _i1.U8ArrayCodec(32).encodeTo(value1, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Delegated && _i5.listsEqual(other.value0, value0) && _i5.listsEqual(other.value1, value1); + + @override + int get hashCode => Object.hash(value0, value1); +} + +/// An \[account\] has cancelled a previous delegation operation. 
+class Undelegated extends Event { + const Undelegated(this.value0); + + factory Undelegated._decode(_i1.Input input) { + return Undelegated(const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 value0; + + @override + Map> toJson() => {'Undelegated': value0.toList()}; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Undelegated && _i5.listsEqual(other.value0, value0); + + @override + int get hashCode => value0.hashCode; +} + +/// An account has voted +class Voted extends Event { + const Voted({required this.who, required this.vote}); + + factory Voted._decode(_i1.Input input) { + return Voted(who: const _i1.U8ArrayCodec(32).decode(input), vote: _i4.AccountVote.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// AccountVote> + final _i4.AccountVote vote; + + @override + Map> toJson() => { + 'Voted': {'who': who.toList(), 'vote': vote.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i4.AccountVote.codec.sizeHint(vote); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i4.AccountVote.codec.encodeTo(vote, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Voted && _i5.listsEqual(other.who, who) && other.vote == vote; + + @override + int get hashCode => Object.hash(who, vote); +} + +/// A vote has been removed +class VoteRemoved extends Event { + const VoteRemoved({required this.who, required this.vote}); + + factory VoteRemoved._decode(_i1.Input input) { + return 
VoteRemoved(who: const _i1.U8ArrayCodec(32).decode(input), vote: _i4.AccountVote.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// AccountVote> + final _i4.AccountVote vote; + + @override + Map> toJson() => { + 'VoteRemoved': {'who': who.toList(), 'vote': vote.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i4.AccountVote.codec.sizeHint(vote); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i4.AccountVote.codec.encodeTo(vote, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is VoteRemoved && _i5.listsEqual(other.who, who) && other.vote == vote; + + @override + int get hashCode => Object.hash(who, vote); +} + +/// The lockup period of a conviction vote expired, and the funds have been unlocked. +class VoteUnlocked extends Event { + const VoteUnlocked({required this.who, required this.class_}); + + factory VoteUnlocked._decode(_i1.Input input) { + return VoteUnlocked(who: const _i1.U8ArrayCodec(32).decode(input), class_: _i1.U16Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// ClassOf + final int class_; + + @override + Map> toJson() => { + 'VoteUnlocked': {'who': who.toList(), 'class': class_}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U16Codec.codec.sizeHint(class_); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U16Codec.codec.encodeTo(class_, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is VoteUnlocked && _i5.listsEqual(other.who, who) && other.class_ == class_; + + @override + int get hashCode => Object.hash(who, 
class_); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/types/delegations.dart b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/types/delegations.dart new file mode 100644 index 00000000..fdac6304 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/types/delegations.dart @@ -0,0 +1,56 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +class Delegations { + const Delegations({required this.votes, required this.capital}); + + factory Delegations.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Balance + final BigInt votes; + + /// Balance + final BigInt capital; + + static const $DelegationsCodec codec = $DelegationsCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'votes': votes, 'capital': capital}; + + @override + bool operator ==(Object other) => + identical(this, other) || other is Delegations && other.votes == votes && other.capital == capital; + + @override + int get hashCode => Object.hash(votes, capital); +} + +class $DelegationsCodec with _i1.Codec { + const $DelegationsCodec(); + + @override + void encodeTo(Delegations obj, _i1.Output output) { + _i1.U128Codec.codec.encodeTo(obj.votes, output); + _i1.U128Codec.codec.encodeTo(obj.capital, output); + } + + @override + Delegations decode(_i1.Input input) { + return Delegations(votes: _i1.U128Codec.codec.decode(input), capital: _i1.U128Codec.codec.decode(input)); + } + + @override + int sizeHint(Delegations obj) { + int size = 0; + size = size + _i1.U128Codec.codec.sizeHint(obj.votes); + size = size + _i1.U128Codec.codec.sizeHint(obj.capital); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/types/tally.dart b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/types/tally.dart new file mode 100644 
index 00000000..6b84fb7d --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/types/tally.dart @@ -0,0 +1,65 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +class Tally { + const Tally({required this.ayes, required this.nays, required this.support}); + + factory Tally.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Votes + final BigInt ayes; + + /// Votes + final BigInt nays; + + /// Votes + final BigInt support; + + static const $TallyCodec codec = $TallyCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'ayes': ayes, 'nays': nays, 'support': support}; + + @override + bool operator ==(Object other) => + identical(this, other) || other is Tally && other.ayes == ayes && other.nays == nays && other.support == support; + + @override + int get hashCode => Object.hash(ayes, nays, support); +} + +class $TallyCodec with _i1.Codec { + const $TallyCodec(); + + @override + void encodeTo(Tally obj, _i1.Output output) { + _i1.U128Codec.codec.encodeTo(obj.ayes, output); + _i1.U128Codec.codec.encodeTo(obj.nays, output); + _i1.U128Codec.codec.encodeTo(obj.support, output); + } + + @override + Tally decode(_i1.Input input) { + return Tally( + ayes: _i1.U128Codec.codec.decode(input), + nays: _i1.U128Codec.codec.decode(input), + support: _i1.U128Codec.codec.decode(input), + ); + } + + @override + int sizeHint(Tally obj) { + int size = 0; + size = size + _i1.U128Codec.codec.sizeHint(obj.ayes); + size = size + _i1.U128Codec.codec.sizeHint(obj.nays); + size = size + _i1.U128Codec.codec.sizeHint(obj.support); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/account_vote.dart b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/account_vote.dart new file mode 100644 index 00000000..c0f7997b --- /dev/null +++ 
b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/account_vote.dart @@ -0,0 +1,222 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import 'vote.dart' as _i3; + +abstract class AccountVote { + const AccountVote(); + + factory AccountVote.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $AccountVoteCodec codec = $AccountVoteCodec(); + + static const $AccountVote values = $AccountVote(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $AccountVote { + const $AccountVote(); + + Standard standard({required _i3.Vote vote, required BigInt balance}) { + return Standard(vote: vote, balance: balance); + } + + Split split({required BigInt aye, required BigInt nay}) { + return Split(aye: aye, nay: nay); + } + + SplitAbstain splitAbstain({required BigInt aye, required BigInt nay, required BigInt abstain}) { + return SplitAbstain(aye: aye, nay: nay, abstain: abstain); + } +} + +class $AccountVoteCodec with _i1.Codec { + const $AccountVoteCodec(); + + @override + AccountVote decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Standard._decode(input); + case 1: + return Split._decode(input); + case 2: + return SplitAbstain._decode(input); + default: + throw Exception('AccountVote: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(AccountVote value, _i1.Output output) { + switch (value.runtimeType) { + case Standard: + (value as Standard).encodeTo(output); + break; + case Split: + (value as Split).encodeTo(output); + break; + case SplitAbstain: + (value as SplitAbstain).encodeTo(output); + break; + default: + throw Exception('AccountVote: Unsupported "$value" 
of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(AccountVote value) { + switch (value.runtimeType) { + case Standard: + return (value as Standard)._sizeHint(); + case Split: + return (value as Split)._sizeHint(); + case SplitAbstain: + return (value as SplitAbstain)._sizeHint(); + default: + throw Exception('AccountVote: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Standard extends AccountVote { + const Standard({required this.vote, required this.balance}); + + factory Standard._decode(_i1.Input input) { + return Standard(vote: _i1.U8Codec.codec.decode(input), balance: _i1.U128Codec.codec.decode(input)); + } + + /// Vote + final _i3.Vote vote; + + /// Balance + final BigInt balance; + + @override + Map> toJson() => { + 'Standard': {'vote': vote, 'balance': balance}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.VoteCodec().sizeHint(vote); + size = size + _i1.U128Codec.codec.sizeHint(balance); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U8Codec.codec.encodeTo(vote, output); + _i1.U128Codec.codec.encodeTo(balance, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Standard && other.vote == vote && other.balance == balance; + + @override + int get hashCode => Object.hash(vote, balance); +} + +class Split extends AccountVote { + const Split({required this.aye, required this.nay}); + + factory Split._decode(_i1.Input input) { + return Split(aye: _i1.U128Codec.codec.decode(input), nay: _i1.U128Codec.codec.decode(input)); + } + + /// Balance + final BigInt aye; + + /// Balance + final BigInt nay; + + @override + Map> toJson() => { + 'Split': {'aye': aye, 'nay': nay}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U128Codec.codec.sizeHint(aye); + size = size + _i1.U128Codec.codec.sizeHint(nay); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(1, output); + _i1.U128Codec.codec.encodeTo(aye, output); + _i1.U128Codec.codec.encodeTo(nay, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Split && other.aye == aye && other.nay == nay; + + @override + int get hashCode => Object.hash(aye, nay); +} + +class SplitAbstain extends AccountVote { + const SplitAbstain({required this.aye, required this.nay, required this.abstain}); + + factory SplitAbstain._decode(_i1.Input input) { + return SplitAbstain( + aye: _i1.U128Codec.codec.decode(input), + nay: _i1.U128Codec.codec.decode(input), + abstain: _i1.U128Codec.codec.decode(input), + ); + } + + /// Balance + final BigInt aye; + + /// Balance + final BigInt nay; + + /// Balance + final BigInt abstain; + + @override + Map> toJson() => { + 'SplitAbstain': {'aye': aye, 'nay': nay, 'abstain': abstain}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U128Codec.codec.sizeHint(aye); + size = size + _i1.U128Codec.codec.sizeHint(nay); + size = size + _i1.U128Codec.codec.sizeHint(abstain); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U128Codec.codec.encodeTo(aye, output); + _i1.U128Codec.codec.encodeTo(nay, output); + _i1.U128Codec.codec.encodeTo(abstain, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is SplitAbstain && other.aye == aye && other.nay == nay && other.abstain == abstain; + + @override + int get hashCode => Object.hash(aye, nay, abstain); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/casting.dart b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/casting.dart new file mode 100644 index 00000000..851190cf --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/casting.dart @@ -0,0 +1,87 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' 
as _i6; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i7; + +import '../../tuples.dart' as _i2; +import '../types/delegations.dart' as _i4; +import 'account_vote.dart' as _i3; +import 'prior_lock.dart' as _i5; + +class Casting { + const Casting({required this.votes, required this.delegations, required this.prior}); + + factory Casting.decode(_i1.Input input) { + return codec.decode(input); + } + + /// BoundedVec<(PollIndex, AccountVote), MaxVotes> + final List<_i2.Tuple2> votes; + + /// Delegations + final _i4.Delegations delegations; + + /// PriorLock + final _i5.PriorLock prior; + + static const $CastingCodec codec = $CastingCodec(); + + _i6.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'votes': votes.map((value) => [value.value0, value.value1.toJson()]).toList(), + 'delegations': delegations.toJson(), + 'prior': prior.toJson(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Casting && + _i7.listsEqual(other.votes, votes) && + other.delegations == delegations && + other.prior == prior; + + @override + int get hashCode => Object.hash(votes, delegations, prior); +} + +class $CastingCodec with _i1.Codec { + const $CastingCodec(); + + @override + void encodeTo(Casting obj, _i1.Output output) { + const _i1.SequenceCodec<_i2.Tuple2>( + _i2.Tuple2Codec(_i1.U32Codec.codec, _i3.AccountVote.codec), + ).encodeTo(obj.votes, output); + _i4.Delegations.codec.encodeTo(obj.delegations, output); + _i5.PriorLock.codec.encodeTo(obj.prior, output); + } + + @override + Casting decode(_i1.Input input) { + return Casting( + votes: const _i1.SequenceCodec<_i2.Tuple2>( + _i2.Tuple2Codec(_i1.U32Codec.codec, _i3.AccountVote.codec), + ).decode(input), + delegations: _i4.Delegations.codec.decode(input), + prior: _i5.PriorLock.codec.decode(input), + ); + } + + @override + int sizeHint(Casting obj) { + int size = 0; + size = + size + + const 
_i1.SequenceCodec<_i2.Tuple2>( + _i2.Tuple2Codec(_i1.U32Codec.codec, _i3.AccountVote.codec), + ).sizeHint(obj.votes); + size = size + _i4.Delegations.codec.sizeHint(obj.delegations); + size = size + _i5.PriorLock.codec.sizeHint(obj.prior); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/delegating.dart b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/delegating.dart new file mode 100644 index 00000000..c18056c7 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/delegating.dart @@ -0,0 +1,101 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i6; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i7; + +import '../../sp_core/crypto/account_id32.dart' as _i2; +import '../conviction/conviction.dart' as _i3; +import '../types/delegations.dart' as _i4; +import 'prior_lock.dart' as _i5; + +class Delegating { + const Delegating({ + required this.balance, + required this.target, + required this.conviction, + required this.delegations, + required this.prior, + }); + + factory Delegating.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Balance + final BigInt balance; + + /// AccountId + final _i2.AccountId32 target; + + /// Conviction + final _i3.Conviction conviction; + + /// Delegations + final _i4.Delegations delegations; + + /// PriorLock + final _i5.PriorLock prior; + + static const $DelegatingCodec codec = $DelegatingCodec(); + + _i6.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'balance': balance, + 'target': target.toList(), + 'conviction': conviction.toJson(), + 'delegations': delegations.toJson(), + 'prior': prior.toJson(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Delegating && + other.balance == balance && + _i7.listsEqual(other.target, target) && + 
other.conviction == conviction && + other.delegations == delegations && + other.prior == prior; + + @override + int get hashCode => Object.hash(balance, target, conviction, delegations, prior); +} + +class $DelegatingCodec with _i1.Codec { + const $DelegatingCodec(); + + @override + void encodeTo(Delegating obj, _i1.Output output) { + _i1.U128Codec.codec.encodeTo(obj.balance, output); + const _i1.U8ArrayCodec(32).encodeTo(obj.target, output); + _i3.Conviction.codec.encodeTo(obj.conviction, output); + _i4.Delegations.codec.encodeTo(obj.delegations, output); + _i5.PriorLock.codec.encodeTo(obj.prior, output); + } + + @override + Delegating decode(_i1.Input input) { + return Delegating( + balance: _i1.U128Codec.codec.decode(input), + target: const _i1.U8ArrayCodec(32).decode(input), + conviction: _i3.Conviction.codec.decode(input), + delegations: _i4.Delegations.codec.decode(input), + prior: _i5.PriorLock.codec.decode(input), + ); + } + + @override + int sizeHint(Delegating obj) { + int size = 0; + size = size + _i1.U128Codec.codec.sizeHint(obj.balance); + size = size + const _i2.AccountId32Codec().sizeHint(obj.target); + size = size + _i3.Conviction.codec.sizeHint(obj.conviction); + size = size + _i4.Delegations.codec.sizeHint(obj.delegations); + size = size + _i5.PriorLock.codec.sizeHint(obj.prior); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/prior_lock.dart b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/prior_lock.dart new file mode 100644 index 00000000..34b7627f --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/prior_lock.dart @@ -0,0 +1,56 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +class PriorLock { + const PriorLock(this.value0, this.value1); + + factory PriorLock.decode(_i1.Input input) { + return codec.decode(input); + } + + 
/// BlockNumber + final int value0; + + /// Balance + final BigInt value1; + + static const $PriorLockCodec codec = $PriorLockCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + List toJson() => [value0, value1]; + + @override + bool operator ==(Object other) => + identical(this, other) || other is PriorLock && other.value0 == value0 && other.value1 == value1; + + @override + int get hashCode => Object.hash(value0, value1); +} + +class $PriorLockCodec with _i1.Codec { + const $PriorLockCodec(); + + @override + void encodeTo(PriorLock obj, _i1.Output output) { + _i1.U32Codec.codec.encodeTo(obj.value0, output); + _i1.U128Codec.codec.encodeTo(obj.value1, output); + } + + @override + PriorLock decode(_i1.Input input) { + return PriorLock(_i1.U32Codec.codec.decode(input), _i1.U128Codec.codec.decode(input)); + } + + @override + int sizeHint(PriorLock obj) { + int size = 0; + size = size + _i1.U32Codec.codec.sizeHint(obj.value0); + size = size + _i1.U128Codec.codec.sizeHint(obj.value1); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/vote.dart b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/vote.dart new file mode 100644 index 00000000..c78cd6dc --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/vote.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef Vote = int; + +class VoteCodec with _i1.Codec { + const VoteCodec(); + + @override + Vote decode(_i1.Input input) { + return _i1.U8Codec.codec.decode(input); + } + + @override + void encodeTo(Vote value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value, output); + } + + @override + int sizeHint(Vote value) { + return _i1.U8Codec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/voting.dart 
b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/voting.dart new file mode 100644 index 00000000..4c150bb6 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_conviction_voting/vote/voting.dart @@ -0,0 +1,148 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import 'casting.dart' as _i3; +import 'delegating.dart' as _i4; + +abstract class Voting { + const Voting(); + + factory Voting.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $VotingCodec codec = $VotingCodec(); + + static const $Voting values = $Voting(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Voting { + const $Voting(); + + Casting casting(_i3.Casting value0) { + return Casting(value0); + } + + Delegating delegating(_i4.Delegating value0) { + return Delegating(value0); + } +} + +class $VotingCodec with _i1.Codec { + const $VotingCodec(); + + @override + Voting decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Casting._decode(input); + case 1: + return Delegating._decode(input); + default: + throw Exception('Voting: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Voting value, _i1.Output output) { + switch (value.runtimeType) { + case Casting: + (value as Casting).encodeTo(output); + break; + case Delegating: + (value as Delegating).encodeTo(output); + break; + default: + throw Exception('Voting: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Voting value) { + switch (value.runtimeType) { + case Casting: + return (value as Casting)._sizeHint(); + case Delegating: + return (value as Delegating)._sizeHint(); + 
default: + throw Exception('Voting: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Casting extends Voting { + const Casting(this.value0); + + factory Casting._decode(_i1.Input input) { + return Casting(_i3.Casting.codec.decode(input)); + } + + /// Casting + final _i3.Casting value0; + + @override + Map> toJson() => {'Casting': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i3.Casting.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.Casting.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Casting && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Delegating extends Voting { + const Delegating(this.value0); + + factory Delegating._decode(_i1.Input input) { + return Delegating(_i4.Delegating.codec.decode(input)); + } + + /// Delegating + final _i4.Delegating value0; + + @override + Map> toJson() => {'Delegating': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i4.Delegating.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i4.Delegating.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Delegating && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_mining_rewards/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_mining_rewards/pallet/event.dart new file mode 100644 index 00000000..081e7da5 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_mining_rewards/pallet/event.dart @@ -0,0 +1,217 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as 
_i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../sp_core/crypto/account_id32.dart' as _i3; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + MinerRewarded minerRewarded({required _i3.AccountId32 miner, required BigInt reward}) { + return MinerRewarded(miner: miner, reward: reward); + } + + FeesCollected feesCollected({required BigInt amount, required BigInt total}) { + return FeesCollected(amount: amount, total: total); + } + + TreasuryRewarded treasuryRewarded({required BigInt reward}) { + return TreasuryRewarded(reward: reward); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return MinerRewarded._decode(input); + case 1: + return FeesCollected._decode(input); + case 2: + return TreasuryRewarded._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case MinerRewarded: + (value as MinerRewarded).encodeTo(output); + break; + case FeesCollected: + (value as FeesCollected).encodeTo(output); + break; + case TreasuryRewarded: + (value as TreasuryRewarded).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case MinerRewarded: + return (value as 
MinerRewarded)._sizeHint(); + case FeesCollected: + return (value as FeesCollected)._sizeHint(); + case TreasuryRewarded: + return (value as TreasuryRewarded)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// A miner has been identified for a block +class MinerRewarded extends Event { + const MinerRewarded({required this.miner, required this.reward}); + + factory MinerRewarded._decode(_i1.Input input) { + return MinerRewarded(miner: const _i1.U8ArrayCodec(32).decode(input), reward: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + /// Miner account + final _i3.AccountId32 miner; + + /// BalanceOf + /// Total reward (base + fees) + final BigInt reward; + + @override + Map> toJson() => { + 'MinerRewarded': {'miner': miner.toList(), 'reward': reward}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(miner); + size = size + _i1.U128Codec.codec.sizeHint(reward); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(miner, output); + _i1.U128Codec.codec.encodeTo(reward, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is MinerRewarded && _i4.listsEqual(other.miner, miner) && other.reward == reward; + + @override + int get hashCode => Object.hash(miner, reward); +} + +/// Transaction fees were collected for later distribution +class FeesCollected extends Event { + const FeesCollected({required this.amount, required this.total}); + + factory FeesCollected._decode(_i1.Input input) { + return FeesCollected(amount: _i1.U128Codec.codec.decode(input), total: _i1.U128Codec.codec.decode(input)); + } + + /// BalanceOf + /// The amount collected + final BigInt amount; + + /// BalanceOf + /// Total fees waiting for distribution + final BigInt total; + + @override + Map> toJson() => { + 'FeesCollected': {'amount': amount, 
'total': total}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U128Codec.codec.sizeHint(amount); + size = size + _i1.U128Codec.codec.sizeHint(total); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U128Codec.codec.encodeTo(amount, output); + _i1.U128Codec.codec.encodeTo(total, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is FeesCollected && other.amount == amount && other.total == total; + + @override + int get hashCode => Object.hash(amount, total); +} + +/// Rewards were sent to Treasury when no miner was specified +class TreasuryRewarded extends Event { + const TreasuryRewarded({required this.reward}); + + factory TreasuryRewarded._decode(_i1.Input input) { + return TreasuryRewarded(reward: _i1.U128Codec.codec.decode(input)); + } + + /// BalanceOf + /// Total reward (base + fees) + final BigInt reward; + + @override + Map> toJson() => { + 'TreasuryRewarded': {'reward': reward}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U128Codec.codec.sizeHint(reward); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U128Codec.codec.encodeTo(reward, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is TreasuryRewarded && other.reward == reward; + + @override + int get hashCode => reward.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_multisig/multisig_data.dart b/quantus_sdk/lib/generated/planck/types/pallet_multisig/multisig_data.dart new file mode 100644 index 00000000..210b2b18 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_multisig/multisig_data.dart @@ -0,0 +1,123 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i4; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import 
'../bounded_collections/bounded_btree_map/bounded_b_tree_map.dart' as _i3; +import '../sp_core/crypto/account_id32.dart' as _i2; +import '../tuples.dart' as _i6; + +class MultisigData { + const MultisigData({ + required this.creator, + required this.signers, + required this.threshold, + required this.proposalNonce, + required this.deposit, + required this.activeProposals, + required this.proposalsPerSigner, + }); + + factory MultisigData.decode(_i1.Input input) { + return codec.decode(input); + } + + /// AccountId + final _i2.AccountId32 creator; + + /// BoundedSigners + final List<_i2.AccountId32> signers; + + /// u32 + final int threshold; + + /// u32 + final int proposalNonce; + + /// Balance + final BigInt deposit; + + /// u32 + final int activeProposals; + + /// BoundedProposalsPerSigner + final _i3.BoundedBTreeMap proposalsPerSigner; + + static const $MultisigDataCodec codec = $MultisigDataCodec(); + + _i4.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'creator': creator.toList(), + 'signers': signers.map((value) => value.toList()).toList(), + 'threshold': threshold, + 'proposalNonce': proposalNonce, + 'deposit': deposit, + 'activeProposals': activeProposals, + 'proposalsPerSigner': proposalsPerSigner.map((value) => [value.value0.toList(), value.value1]).toList(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is MultisigData && + _i5.listsEqual(other.creator, creator) && + _i5.listsEqual(other.signers, signers) && + other.threshold == threshold && + other.proposalNonce == proposalNonce && + other.deposit == deposit && + other.activeProposals == activeProposals && + other.proposalsPerSigner == proposalsPerSigner; + + @override + int get hashCode => + Object.hash(creator, signers, threshold, proposalNonce, deposit, activeProposals, proposalsPerSigner); +} + +class $MultisigDataCodec with _i1.Codec { + const $MultisigDataCodec(); + + @override + void encodeTo(MultisigData obj, _i1.Output 
output) { + const _i1.U8ArrayCodec(32).encodeTo(obj.creator, output); + const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).encodeTo(obj.signers, output); + _i1.U32Codec.codec.encodeTo(obj.threshold, output); + _i1.U32Codec.codec.encodeTo(obj.proposalNonce, output); + _i1.U128Codec.codec.encodeTo(obj.deposit, output); + _i1.U32Codec.codec.encodeTo(obj.activeProposals, output); + const _i1.SequenceCodec<_i6.Tuple2<_i2.AccountId32, int>>( + _i6.Tuple2Codec<_i2.AccountId32, int>(_i2.AccountId32Codec(), _i1.U32Codec.codec), + ).encodeTo(obj.proposalsPerSigner, output); + } + + @override + MultisigData decode(_i1.Input input) { + return MultisigData( + creator: const _i1.U8ArrayCodec(32).decode(input), + signers: const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).decode(input), + threshold: _i1.U32Codec.codec.decode(input), + proposalNonce: _i1.U32Codec.codec.decode(input), + deposit: _i1.U128Codec.codec.decode(input), + activeProposals: _i1.U32Codec.codec.decode(input), + proposalsPerSigner: const _i1.SequenceCodec<_i6.Tuple2<_i2.AccountId32, int>>( + _i6.Tuple2Codec<_i2.AccountId32, int>(_i2.AccountId32Codec(), _i1.U32Codec.codec), + ).decode(input), + ); + } + + @override + int sizeHint(MultisigData obj) { + int size = 0; + size = size + const _i2.AccountId32Codec().sizeHint(obj.creator); + size = size + const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).sizeHint(obj.signers); + size = size + _i1.U32Codec.codec.sizeHint(obj.threshold); + size = size + _i1.U32Codec.codec.sizeHint(obj.proposalNonce); + size = size + _i1.U128Codec.codec.sizeHint(obj.deposit); + size = size + _i1.U32Codec.codec.sizeHint(obj.activeProposals); + size = size + const _i3.BoundedBTreeMapCodec().sizeHint(obj.proposalsPerSigner); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_multisig/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_multisig/pallet/call.dart new file mode 100644 index 00000000..fbb5fb20 
--- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_multisig/pallet/call.dart @@ -0,0 +1,599 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../sp_core/crypto/account_id32.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. +abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + CreateMultisig createMultisig({ + required List<_i3.AccountId32> signers, + required int threshold, + required BigInt nonce, + }) { + return CreateMultisig(signers: signers, threshold: threshold, nonce: nonce); + } + + Propose propose({required _i3.AccountId32 multisigAddress, required List call, required int expiry}) { + return Propose(multisigAddress: multisigAddress, call: call, expiry: expiry); + } + + Approve approve({required _i3.AccountId32 multisigAddress, required int proposalId}) { + return Approve(multisigAddress: multisigAddress, proposalId: proposalId); + } + + Cancel cancel({required _i3.AccountId32 multisigAddress, required int proposalId}) { + return Cancel(multisigAddress: multisigAddress, proposalId: proposalId); + } + + RemoveExpired removeExpired({required _i3.AccountId32 multisigAddress, required int proposalId}) { + return RemoveExpired(multisigAddress: multisigAddress, proposalId: proposalId); + } + + ClaimDeposits claimDeposits({required _i3.AccountId32 multisigAddress}) { + return ClaimDeposits(multisigAddress: multisigAddress); + } + + 
Execute execute({required _i3.AccountId32 multisigAddress, required int proposalId}) { + return Execute(multisigAddress: multisigAddress, proposalId: proposalId); + } + + ApproveDissolve approveDissolve({required _i3.AccountId32 multisigAddress}) { + return ApproveDissolve(multisigAddress: multisigAddress); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return CreateMultisig._decode(input); + case 1: + return Propose._decode(input); + case 2: + return Approve._decode(input); + case 3: + return Cancel._decode(input); + case 4: + return RemoveExpired._decode(input); + case 5: + return ClaimDeposits._decode(input); + case 7: + return Execute._decode(input); + case 6: + return ApproveDissolve._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case CreateMultisig: + (value as CreateMultisig).encodeTo(output); + break; + case Propose: + (value as Propose).encodeTo(output); + break; + case Approve: + (value as Approve).encodeTo(output); + break; + case Cancel: + (value as Cancel).encodeTo(output); + break; + case RemoveExpired: + (value as RemoveExpired).encodeTo(output); + break; + case ClaimDeposits: + (value as ClaimDeposits).encodeTo(output); + break; + case Execute: + (value as Execute).encodeTo(output); + break; + case ApproveDissolve: + (value as ApproveDissolve).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case CreateMultisig: + return (value as CreateMultisig)._sizeHint(); + case Propose: + return (value as Propose)._sizeHint(); + case Approve: + return (value as Approve)._sizeHint(); + case Cancel: + return (value as 
Cancel)._sizeHint(); + case RemoveExpired: + return (value as RemoveExpired)._sizeHint(); + case ClaimDeposits: + return (value as ClaimDeposits)._sizeHint(); + case Execute: + return (value as Execute)._sizeHint(); + case ApproveDissolve: + return (value as ApproveDissolve)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Create a new multisig account with deterministic address +/// +/// Parameters: +/// - `signers`: List of accounts that can sign for this multisig +/// - `threshold`: Number of approvals required to execute transactions +/// - `nonce`: User-provided nonce for address uniqueness +/// +/// The multisig address is deterministically derived from: +/// hash(pallet_id || sorted_signers || threshold || nonce) +/// +/// Signers are automatically sorted before hashing, so order doesn't matter. +/// +/// Economic costs: +/// - MultisigFee: burned immediately (spam prevention) +/// - MultisigDeposit: reserved until dissolution, then returned to creator (storage bond) +class CreateMultisig extends Call { + const CreateMultisig({required this.signers, required this.threshold, required this.nonce}); + + factory CreateMultisig._decode(_i1.Input input) { + return CreateMultisig( + signers: const _i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).decode(input), + threshold: _i1.U32Codec.codec.decode(input), + nonce: _i1.U64Codec.codec.decode(input), + ); + } + + /// Vec + final List<_i3.AccountId32> signers; + + /// u32 + final int threshold; + + /// u64 + final BigInt nonce; + + @override + Map> toJson() => { + 'create_multisig': { + 'signers': signers.map((value) => value.toList()).toList(), + 'threshold': threshold, + 'nonce': nonce, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).sizeHint(signers); + size = size + _i1.U32Codec.codec.sizeHint(threshold); + size = size + 
_i1.U64Codec.codec.sizeHint(nonce); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).encodeTo(signers, output); + _i1.U32Codec.codec.encodeTo(threshold, output); + _i1.U64Codec.codec.encodeTo(nonce, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is CreateMultisig && + _i4.listsEqual(other.signers, signers) && + other.threshold == threshold && + other.nonce == nonce; + + @override + int get hashCode => Object.hash(signers, threshold, nonce); +} + +/// Propose a transaction to be executed by the multisig +/// +/// Parameters: +/// - `multisig_address`: The multisig account that will execute the call +/// - `call`: The encoded call to execute +/// - `expiry`: Block number when this proposal expires +/// +/// The proposer must be a signer and must pay: +/// - A deposit (refundable - returned immediately on execution/cancellation) +/// - A fee (non-refundable, burned immediately) +/// +/// **Auto-cleanup:** Before creating a new proposal, ALL proposer's expired +/// proposals are automatically removed. This is the primary cleanup mechanism. +/// +/// **For threshold=1:** If the multisig threshold is 1, the proposal executes immediately. +/// +/// **Weight:** Charged upfront for worst-case (high-security path with decode). +/// Refunded to actual cost on success based on whether HS path was taken. 
+class Propose extends Call { + const Propose({required this.multisigAddress, required this.call, required this.expiry}); + + factory Propose._decode(_i1.Input input) { + return Propose( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + call: _i1.U8SequenceCodec.codec.decode(input), + expiry: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// Vec + final List call; + + /// BlockNumberFor + final int expiry; + + @override + Map> toJson() => { + 'propose': {'multisigAddress': multisigAddress.toList(), 'call': call, 'expiry': expiry}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + _i1.U8SequenceCodec.codec.sizeHint(call); + size = size + _i1.U32Codec.codec.sizeHint(expiry); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + _i1.U8SequenceCodec.codec.encodeTo(call, output); + _i1.U32Codec.codec.encodeTo(expiry, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Propose && + _i4.listsEqual(other.multisigAddress, multisigAddress) && + _i4.listsEqual(other.call, call) && + other.expiry == expiry; + + @override + int get hashCode => Object.hash(multisigAddress, call, expiry); +} + +/// Approve a proposed transaction +/// +/// If this approval brings the total approvals to or above the threshold, +/// the proposal status changes to `Approved` and can be executed via `execute()`. 
+/// +/// Parameters: +/// - `multisig_address`: The multisig account +/// - `proposal_id`: ID (nonce) of the proposal to approve +/// +/// Weight: Charges for MAX call size, refunds based on actual +class Approve extends Call { + const Approve({required this.multisigAddress, required this.proposalId}); + + factory Approve._decode(_i1.Input input) { + return Approve( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + proposalId: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// u32 + final int proposalId; + + @override + Map> toJson() => { + 'approve': {'multisigAddress': multisigAddress.toList(), 'proposalId': proposalId}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + _i1.U32Codec.codec.sizeHint(proposalId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + _i1.U32Codec.codec.encodeTo(proposalId, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Approve && _i4.listsEqual(other.multisigAddress, multisigAddress) && other.proposalId == proposalId; + + @override + int get hashCode => Object.hash(multisigAddress, proposalId); +} + +/// Cancel a proposed transaction (only by proposer) +/// +/// Parameters: +/// - `multisig_address`: The multisig account +/// - `proposal_id`: ID (nonce) of the proposal to cancel +class Cancel extends Call { + const Cancel({required this.multisigAddress, required this.proposalId}); + + factory Cancel._decode(_i1.Input input) { + return Cancel( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + proposalId: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// u32 + final int proposalId; + + @override + Map> toJson() => { + 'cancel': 
{'multisigAddress': multisigAddress.toList(), 'proposalId': proposalId}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + _i1.U32Codec.codec.sizeHint(proposalId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + _i1.U32Codec.codec.encodeTo(proposalId, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Cancel && _i4.listsEqual(other.multisigAddress, multisigAddress) && other.proposalId == proposalId; + + @override + int get hashCode => Object.hash(multisigAddress, proposalId); +} + +/// Remove expired proposals and return deposits to proposers +/// +/// Can only be called by signers of the multisig. +/// Only removes Active proposals that have expired (past expiry block). +/// Executed and Cancelled proposals are automatically cleaned up immediately. +/// +/// The deposit is always returned to the original proposer, not the caller. +/// This allows any signer to help clean up storage even if proposer is inactive. 
+class RemoveExpired extends Call { + const RemoveExpired({required this.multisigAddress, required this.proposalId}); + + factory RemoveExpired._decode(_i1.Input input) { + return RemoveExpired( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + proposalId: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// u32 + final int proposalId; + + @override + Map> toJson() => { + 'remove_expired': {'multisigAddress': multisigAddress.toList(), 'proposalId': proposalId}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + _i1.U32Codec.codec.sizeHint(proposalId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + _i1.U32Codec.codec.encodeTo(proposalId, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is RemoveExpired && + _i4.listsEqual(other.multisigAddress, multisigAddress) && + other.proposalId == proposalId; + + @override + int get hashCode => Object.hash(multisigAddress, proposalId); +} + +/// Claim all deposits from expired proposals +/// +/// This is a batch operation that removes all expired proposals where: +/// - Caller is the proposer +/// - Proposal is Active and past expiry block +/// +/// Note: Executed and Cancelled proposals are automatically cleaned up immediately, +/// so only Active+Expired proposals need manual cleanup. +/// +/// Returns all proposal deposits to the proposer in a single transaction. 
+class ClaimDeposits extends Call { + const ClaimDeposits({required this.multisigAddress}); + + factory ClaimDeposits._decode(_i1.Input input) { + return ClaimDeposits(multisigAddress: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + @override + Map>> toJson() => { + 'claim_deposits': {'multisigAddress': multisigAddress.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is ClaimDeposits && _i4.listsEqual(other.multisigAddress, multisigAddress); + + @override + int get hashCode => multisigAddress.hashCode; +} + +/// Execute an approved proposal +/// +/// Can be called by any signer of the multisig once the proposal has reached +/// the approval threshold (status = Approved). The proposal must not be expired. 
+/// +/// On execution: +/// - The call is decoded and dispatched as the multisig account +/// - Proposal is removed from storage +/// - Deposit is returned to the proposer +/// +/// Parameters: +/// - `multisig_address`: The multisig account +/// - `proposal_id`: ID (nonce) of the proposal to execute +class Execute extends Call { + const Execute({required this.multisigAddress, required this.proposalId}); + + factory Execute._decode(_i1.Input input) { + return Execute( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + proposalId: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// u32 + final int proposalId; + + @override + Map> toJson() => { + 'execute': {'multisigAddress': multisigAddress.toList(), 'proposalId': proposalId}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + _i1.U32Codec.codec.sizeHint(proposalId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + _i1.U32Codec.codec.encodeTo(proposalId, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Execute && _i4.listsEqual(other.multisigAddress, multisigAddress) && other.proposalId == proposalId; + + @override + int get hashCode => Object.hash(multisigAddress, proposalId); +} + +/// Approve dissolving a multisig account +/// +/// Signers call this to approve dissolving the multisig. +/// When threshold is reached, the multisig is automatically dissolved. 
+/// +/// Requirements: +/// - Caller must be a signer +/// - No proposals exist (active, executed, or cancelled) - must be fully cleaned up +/// - Multisig account balance must be zero +/// +/// When threshold is reached: +/// - Deposit is returned to creator +/// - Multisig storage is removed +class ApproveDissolve extends Call { + const ApproveDissolve({required this.multisigAddress}); + + factory ApproveDissolve._decode(_i1.Input input) { + return ApproveDissolve(multisigAddress: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + @override + Map>> toJson() => { + 'approve_dissolve': {'multisigAddress': multisigAddress.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is ApproveDissolve && _i4.listsEqual(other.multisigAddress, multisigAddress); + + @override + int get hashCode => multisigAddress.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_multisig/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_multisig/pallet/error.dart new file mode 100644 index 00000000..f9df4998 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_multisig/pallet/error.dart @@ -0,0 +1,178 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. 
+enum Error { + /// Not enough signers provided + notEnoughSigners('NotEnoughSigners', 0), + + /// Threshold must be greater than zero + thresholdZero('ThresholdZero', 1), + + /// Threshold exceeds number of signers + thresholdTooHigh('ThresholdTooHigh', 2), + + /// Too many signers + tooManySigners('TooManySigners', 3), + + /// Duplicate signer in list + duplicateSigner('DuplicateSigner', 4), + + /// Multisig already exists + multisigAlreadyExists('MultisigAlreadyExists', 5), + + /// Multisig not found + multisigNotFound('MultisigNotFound', 6), + + /// Caller is not a signer of this multisig + notASigner('NotASigner', 7), + + /// Proposal not found + proposalNotFound('ProposalNotFound', 8), + + /// Caller is not the proposer + notProposer('NotProposer', 9), + + /// Already approved by this signer + alreadyApproved('AlreadyApproved', 10), + + /// Not enough approvals to execute + notEnoughApprovals('NotEnoughApprovals', 11), + + /// Proposal expiry is in the past + expiryInPast('ExpiryInPast', 12), + + /// Proposal expiry is too far in the future (exceeds MaxExpiryDuration) + expiryTooFar('ExpiryTooFar', 13), + + /// Proposal has expired + proposalExpired('ProposalExpired', 14), + + /// Call data too large + callTooLarge('CallTooLarge', 15), + + /// Failed to decode call data + invalidCall('InvalidCall', 16), + + /// Too many total proposals in storage for this multisig (cleanup required) + tooManyProposalsInStorage('TooManyProposalsInStorage', 17), + + /// This signer has too many proposals in storage (filibuster protection) + tooManyProposalsPerSigner('TooManyProposalsPerSigner', 18), + + /// Insufficient balance for deposit + insufficientBalance('InsufficientBalance', 19), + + /// Proposal has active deposit + proposalHasDeposit('ProposalHasDeposit', 20), + + /// Proposal has not expired yet + proposalNotExpired('ProposalNotExpired', 21), + + /// Proposal is not active (already executed or cancelled) + proposalNotActive('ProposalNotActive', 22), + + /// Proposal 
has not been approved yet (threshold not reached) + proposalNotApproved('ProposalNotApproved', 23), + + /// Cannot dissolve multisig with existing proposals (clear them first) + proposalsExist('ProposalsExist', 24), + + /// Multisig account must have zero balance before dissolution + multisigAccountNotZero('MultisigAccountNotZero', 25), + + /// Call is not allowed for high-security multisig + callNotAllowedForHighSecurityMultisig('CallNotAllowedForHighSecurityMultisig', 26); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.notEnoughSigners; + case 1: + return Error.thresholdZero; + case 2: + return Error.thresholdTooHigh; + case 3: + return Error.tooManySigners; + case 4: + return Error.duplicateSigner; + case 5: + return Error.multisigAlreadyExists; + case 6: + return Error.multisigNotFound; + case 7: + return Error.notASigner; + case 8: + return Error.proposalNotFound; + case 9: + return Error.notProposer; + case 10: + return Error.alreadyApproved; + case 11: + return Error.notEnoughApprovals; + case 12: + return Error.expiryInPast; + case 13: + return Error.expiryTooFar; + case 14: + return Error.proposalExpired; + case 15: + return Error.callTooLarge; + case 16: + return Error.invalidCall; + case 17: + return Error.tooManyProposalsInStorage; + case 18: + return Error.tooManyProposalsPerSigner; + case 19: + return Error.insufficientBalance; + case 20: + return Error.proposalHasDeposit; + case 21: + return Error.proposalNotExpired; + case 22: + return Error.proposalNotActive; + 
case 23: + return Error.proposalNotApproved; + case 24: + return Error.proposalsExist; + case 25: + return Error.multisigAccountNotZero; + case 26: + return Error.callNotAllowedForHighSecurityMultisig; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_multisig/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_multisig/pallet/event.dart new file mode 100644 index 00000000..a791a3ea --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_multisig/pallet/event.dart @@ -0,0 +1,942 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import '../../sp_core/crypto/account_id32.dart' as _i3; +import '../../sp_runtime/dispatch_error.dart' as _i4; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + MultisigCreated multisigCreated({ + required _i3.AccountId32 creator, + required _i3.AccountId32 multisigAddress, + required List<_i3.AccountId32> signers, + required int threshold, + required BigInt nonce, + }) { + return MultisigCreated( + creator: creator, + multisigAddress: multisigAddress, + signers: signers, + threshold: threshold, + nonce: nonce, + ); + } + + ProposalCreated proposalCreated({ + required _i3.AccountId32 
multisigAddress, + required _i3.AccountId32 proposer, + required int proposalId, + }) { + return ProposalCreated(multisigAddress: multisigAddress, proposer: proposer, proposalId: proposalId); + } + + ProposalApproved proposalApproved({ + required _i3.AccountId32 multisigAddress, + required _i3.AccountId32 approver, + required int proposalId, + required int approvalsCount, + }) { + return ProposalApproved( + multisigAddress: multisigAddress, + approver: approver, + proposalId: proposalId, + approvalsCount: approvalsCount, + ); + } + + ProposalReadyToExecute proposalReadyToExecute({ + required _i3.AccountId32 multisigAddress, + required int proposalId, + required int approvalsCount, + }) { + return ProposalReadyToExecute( + multisigAddress: multisigAddress, + proposalId: proposalId, + approvalsCount: approvalsCount, + ); + } + + ProposalExecuted proposalExecuted({ + required _i3.AccountId32 multisigAddress, + required int proposalId, + required _i3.AccountId32 proposer, + required List call, + required List<_i3.AccountId32> approvers, + required _i1.Result result, + }) { + return ProposalExecuted( + multisigAddress: multisigAddress, + proposalId: proposalId, + proposer: proposer, + call: call, + approvers: approvers, + result: result, + ); + } + + ProposalCancelled proposalCancelled({ + required _i3.AccountId32 multisigAddress, + required _i3.AccountId32 proposer, + required int proposalId, + }) { + return ProposalCancelled(multisigAddress: multisigAddress, proposer: proposer, proposalId: proposalId); + } + + ProposalRemoved proposalRemoved({ + required _i3.AccountId32 multisigAddress, + required int proposalId, + required _i3.AccountId32 proposer, + required _i3.AccountId32 removedBy, + }) { + return ProposalRemoved( + multisigAddress: multisigAddress, + proposalId: proposalId, + proposer: proposer, + removedBy: removedBy, + ); + } + + DepositsClaimed depositsClaimed({ + required _i3.AccountId32 multisigAddress, + required _i3.AccountId32 claimer, + required BigInt 
totalReturned, + required int proposalsRemoved, + required bool multisigRemoved, + }) { + return DepositsClaimed( + multisigAddress: multisigAddress, + claimer: claimer, + totalReturned: totalReturned, + proposalsRemoved: proposalsRemoved, + multisigRemoved: multisigRemoved, + ); + } + + DissolveApproved dissolveApproved({ + required _i3.AccountId32 multisigAddress, + required _i3.AccountId32 approver, + required int approvalsCount, + }) { + return DissolveApproved(multisigAddress: multisigAddress, approver: approver, approvalsCount: approvalsCount); + } + + MultisigDissolved multisigDissolved({ + required _i3.AccountId32 multisigAddress, + required _i3.AccountId32 depositReturned, + required List<_i3.AccountId32> approvers, + }) { + return MultisigDissolved(multisigAddress: multisigAddress, depositReturned: depositReturned, approvers: approvers); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return MultisigCreated._decode(input); + case 1: + return ProposalCreated._decode(input); + case 2: + return ProposalApproved._decode(input); + case 3: + return ProposalReadyToExecute._decode(input); + case 4: + return ProposalExecuted._decode(input); + case 5: + return ProposalCancelled._decode(input); + case 6: + return ProposalRemoved._decode(input); + case 7: + return DepositsClaimed._decode(input); + case 8: + return DissolveApproved._decode(input); + case 9: + return MultisigDissolved._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case MultisigCreated: + (value as MultisigCreated).encodeTo(output); + break; + case ProposalCreated: + (value as ProposalCreated).encodeTo(output); + break; + case ProposalApproved: + (value as ProposalApproved).encodeTo(output); + break; + case 
ProposalReadyToExecute: + (value as ProposalReadyToExecute).encodeTo(output); + break; + case ProposalExecuted: + (value as ProposalExecuted).encodeTo(output); + break; + case ProposalCancelled: + (value as ProposalCancelled).encodeTo(output); + break; + case ProposalRemoved: + (value as ProposalRemoved).encodeTo(output); + break; + case DepositsClaimed: + (value as DepositsClaimed).encodeTo(output); + break; + case DissolveApproved: + (value as DissolveApproved).encodeTo(output); + break; + case MultisigDissolved: + (value as MultisigDissolved).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case MultisigCreated: + return (value as MultisigCreated)._sizeHint(); + case ProposalCreated: + return (value as ProposalCreated)._sizeHint(); + case ProposalApproved: + return (value as ProposalApproved)._sizeHint(); + case ProposalReadyToExecute: + return (value as ProposalReadyToExecute)._sizeHint(); + case ProposalExecuted: + return (value as ProposalExecuted)._sizeHint(); + case ProposalCancelled: + return (value as ProposalCancelled)._sizeHint(); + case ProposalRemoved: + return (value as ProposalRemoved)._sizeHint(); + case DepositsClaimed: + return (value as DepositsClaimed)._sizeHint(); + case DissolveApproved: + return (value as DissolveApproved)._sizeHint(); + case MultisigDissolved: + return (value as MultisigDissolved)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// A new multisig account was created +/// [creator, multisig_address, signers, threshold, nonce] +class MultisigCreated extends Event { + const MultisigCreated({ + required this.creator, + required this.multisigAddress, + required this.signers, + required this.threshold, + required this.nonce, + }); + + factory MultisigCreated._decode(_i1.Input input) { + return 
MultisigCreated( + creator: const _i1.U8ArrayCodec(32).decode(input), + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + signers: const _i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).decode(input), + threshold: _i1.U32Codec.codec.decode(input), + nonce: _i1.U64Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 creator; + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// Vec + final List<_i3.AccountId32> signers; + + /// u32 + final int threshold; + + /// u64 + final BigInt nonce; + + @override + Map> toJson() => { + 'MultisigCreated': { + 'creator': creator.toList(), + 'multisigAddress': multisigAddress.toList(), + 'signers': signers.map((value) => value.toList()).toList(), + 'threshold': threshold, + 'nonce': nonce, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(creator); + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + const _i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).sizeHint(signers); + size = size + _i1.U32Codec.codec.sizeHint(threshold); + size = size + _i1.U64Codec.codec.sizeHint(nonce); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(creator, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + const _i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).encodeTo(signers, output); + _i1.U32Codec.codec.encodeTo(threshold, output); + _i1.U64Codec.codec.encodeTo(nonce, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is MultisigCreated && + _i5.listsEqual(other.creator, creator) && + _i5.listsEqual(other.multisigAddress, multisigAddress) && + _i5.listsEqual(other.signers, signers) && + other.threshold == threshold && + other.nonce == nonce; + + @override + int get hashCode => Object.hash(creator, multisigAddress, 
signers, threshold, nonce); +} + +/// A proposal has been created +class ProposalCreated extends Event { + const ProposalCreated({required this.multisigAddress, required this.proposer, required this.proposalId}); + + factory ProposalCreated._decode(_i1.Input input) { + return ProposalCreated( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + proposer: const _i1.U8ArrayCodec(32).decode(input), + proposalId: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// T::AccountId + final _i3.AccountId32 proposer; + + /// u32 + final int proposalId; + + @override + Map> toJson() => { + 'ProposalCreated': { + 'multisigAddress': multisigAddress.toList(), + 'proposer': proposer.toList(), + 'proposalId': proposalId, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + const _i3.AccountId32Codec().sizeHint(proposer); + size = size + _i1.U32Codec.codec.sizeHint(proposalId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + const _i1.U8ArrayCodec(32).encodeTo(proposer, output); + _i1.U32Codec.codec.encodeTo(proposalId, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ProposalCreated && + _i5.listsEqual(other.multisigAddress, multisigAddress) && + _i5.listsEqual(other.proposer, proposer) && + other.proposalId == proposalId; + + @override + int get hashCode => Object.hash(multisigAddress, proposer, proposalId); +} + +/// A proposal has been approved by a signer +class ProposalApproved extends Event { + const ProposalApproved({ + required this.multisigAddress, + required this.approver, + required this.proposalId, + required this.approvalsCount, + }); + + factory ProposalApproved._decode(_i1.Input input) { + return ProposalApproved( + multisigAddress: const 
_i1.U8ArrayCodec(32).decode(input), + approver: const _i1.U8ArrayCodec(32).decode(input), + proposalId: _i1.U32Codec.codec.decode(input), + approvalsCount: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// T::AccountId + final _i3.AccountId32 approver; + + /// u32 + final int proposalId; + + /// u32 + final int approvalsCount; + + @override + Map> toJson() => { + 'ProposalApproved': { + 'multisigAddress': multisigAddress.toList(), + 'approver': approver.toList(), + 'proposalId': proposalId, + 'approvalsCount': approvalsCount, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + const _i3.AccountId32Codec().sizeHint(approver); + size = size + _i1.U32Codec.codec.sizeHint(proposalId); + size = size + _i1.U32Codec.codec.sizeHint(approvalsCount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + const _i1.U8ArrayCodec(32).encodeTo(approver, output); + _i1.U32Codec.codec.encodeTo(proposalId, output); + _i1.U32Codec.codec.encodeTo(approvalsCount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ProposalApproved && + _i5.listsEqual(other.multisigAddress, multisigAddress) && + _i5.listsEqual(other.approver, approver) && + other.proposalId == proposalId && + other.approvalsCount == approvalsCount; + + @override + int get hashCode => Object.hash(multisigAddress, approver, proposalId, approvalsCount); +} + +/// A proposal has reached threshold and is ready to execute +class ProposalReadyToExecute extends Event { + const ProposalReadyToExecute({required this.multisigAddress, required this.proposalId, required this.approvalsCount}); + + factory ProposalReadyToExecute._decode(_i1.Input input) { + return ProposalReadyToExecute( + multisigAddress: const 
_i1.U8ArrayCodec(32).decode(input), + proposalId: _i1.U32Codec.codec.decode(input), + approvalsCount: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// u32 + final int proposalId; + + /// u32 + final int approvalsCount; + + @override + Map> toJson() => { + 'ProposalReadyToExecute': { + 'multisigAddress': multisigAddress.toList(), + 'proposalId': proposalId, + 'approvalsCount': approvalsCount, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + _i1.U32Codec.codec.sizeHint(proposalId); + size = size + _i1.U32Codec.codec.sizeHint(approvalsCount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + _i1.U32Codec.codec.encodeTo(proposalId, output); + _i1.U32Codec.codec.encodeTo(approvalsCount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ProposalReadyToExecute && + _i5.listsEqual(other.multisigAddress, multisigAddress) && + other.proposalId == proposalId && + other.approvalsCount == approvalsCount; + + @override + int get hashCode => Object.hash(multisigAddress, proposalId, approvalsCount); +} + +/// A proposal has been executed +/// Contains all data needed for indexing by SubSquid +class ProposalExecuted extends Event { + const ProposalExecuted({ + required this.multisigAddress, + required this.proposalId, + required this.proposer, + required this.call, + required this.approvers, + required this.result, + }); + + factory ProposalExecuted._decode(_i1.Input input) { + return ProposalExecuted( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + proposalId: _i1.U32Codec.codec.decode(input), + proposer: const _i1.U8ArrayCodec(32).decode(input), + call: _i1.U8SequenceCodec.codec.decode(input), + approvers: const 
_i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).decode(input), + result: const _i1.ResultCodec( + _i1.NullCodec.codec, + _i4.DispatchError.codec, + ).decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// u32 + final int proposalId; + + /// T::AccountId + final _i3.AccountId32 proposer; + + /// Vec + final List call; + + /// Vec + final List<_i3.AccountId32> approvers; + + /// DispatchResult + final _i1.Result result; + + @override + Map> toJson() => { + 'ProposalExecuted': { + 'multisigAddress': multisigAddress.toList(), + 'proposalId': proposalId, + 'proposer': proposer.toList(), + 'call': call, + 'approvers': approvers.map((value) => value.toList()).toList(), + 'result': result.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + _i1.U32Codec.codec.sizeHint(proposalId); + size = size + const _i3.AccountId32Codec().sizeHint(proposer); + size = size + _i1.U8SequenceCodec.codec.sizeHint(call); + size = size + const _i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).sizeHint(approvers); + size = + size + + const _i1.ResultCodec( + _i1.NullCodec.codec, + _i4.DispatchError.codec, + ).sizeHint(result); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + _i1.U32Codec.codec.encodeTo(proposalId, output); + const _i1.U8ArrayCodec(32).encodeTo(proposer, output); + _i1.U8SequenceCodec.codec.encodeTo(call, output); + const _i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).encodeTo(approvers, output); + const _i1.ResultCodec( + _i1.NullCodec.codec, + _i4.DispatchError.codec, + ).encodeTo(result, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ProposalExecuted && + _i5.listsEqual(other.multisigAddress, multisigAddress) && + other.proposalId == proposalId && 
+ _i5.listsEqual(other.proposer, proposer) && + _i5.listsEqual(other.call, call) && + _i5.listsEqual(other.approvers, approvers) && + other.result == result; + + @override + int get hashCode => Object.hash(multisigAddress, proposalId, proposer, call, approvers, result); +} + +/// A proposal has been cancelled by the proposer +class ProposalCancelled extends Event { + const ProposalCancelled({required this.multisigAddress, required this.proposer, required this.proposalId}); + + factory ProposalCancelled._decode(_i1.Input input) { + return ProposalCancelled( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + proposer: const _i1.U8ArrayCodec(32).decode(input), + proposalId: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// T::AccountId + final _i3.AccountId32 proposer; + + /// u32 + final int proposalId; + + @override + Map> toJson() => { + 'ProposalCancelled': { + 'multisigAddress': multisigAddress.toList(), + 'proposer': proposer.toList(), + 'proposalId': proposalId, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + const _i3.AccountId32Codec().sizeHint(proposer); + size = size + _i1.U32Codec.codec.sizeHint(proposalId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + const _i1.U8ArrayCodec(32).encodeTo(proposer, output); + _i1.U32Codec.codec.encodeTo(proposalId, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ProposalCancelled && + _i5.listsEqual(other.multisigAddress, multisigAddress) && + _i5.listsEqual(other.proposer, proposer) && + other.proposalId == proposalId; + + @override + int get hashCode => Object.hash(multisigAddress, proposer, proposalId); +} + +/// Expired proposal was removed from storage +class ProposalRemoved extends Event 
{ + const ProposalRemoved({ + required this.multisigAddress, + required this.proposalId, + required this.proposer, + required this.removedBy, + }); + + factory ProposalRemoved._decode(_i1.Input input) { + return ProposalRemoved( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + proposalId: _i1.U32Codec.codec.decode(input), + proposer: const _i1.U8ArrayCodec(32).decode(input), + removedBy: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// u32 + final int proposalId; + + /// T::AccountId + final _i3.AccountId32 proposer; + + /// T::AccountId + final _i3.AccountId32 removedBy; + + @override + Map> toJson() => { + 'ProposalRemoved': { + 'multisigAddress': multisigAddress.toList(), + 'proposalId': proposalId, + 'proposer': proposer.toList(), + 'removedBy': removedBy.toList(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + _i1.U32Codec.codec.sizeHint(proposalId); + size = size + const _i3.AccountId32Codec().sizeHint(proposer); + size = size + const _i3.AccountId32Codec().sizeHint(removedBy); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + _i1.U32Codec.codec.encodeTo(proposalId, output); + const _i1.U8ArrayCodec(32).encodeTo(proposer, output); + const _i1.U8ArrayCodec(32).encodeTo(removedBy, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ProposalRemoved && + _i5.listsEqual(other.multisigAddress, multisigAddress) && + other.proposalId == proposalId && + _i5.listsEqual(other.proposer, proposer) && + _i5.listsEqual(other.removedBy, removedBy); + + @override + int get hashCode => Object.hash(multisigAddress, proposalId, proposer, removedBy); +} + +/// Batch deposits claimed +class DepositsClaimed extends Event { + const DepositsClaimed({ 
+ required this.multisigAddress, + required this.claimer, + required this.totalReturned, + required this.proposalsRemoved, + required this.multisigRemoved, + }); + + factory DepositsClaimed._decode(_i1.Input input) { + return DepositsClaimed( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + claimer: const _i1.U8ArrayCodec(32).decode(input), + totalReturned: _i1.U128Codec.codec.decode(input), + proposalsRemoved: _i1.U32Codec.codec.decode(input), + multisigRemoved: _i1.BoolCodec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// T::AccountId + final _i3.AccountId32 claimer; + + /// BalanceOf + final BigInt totalReturned; + + /// u32 + final int proposalsRemoved; + + /// bool + final bool multisigRemoved; + + @override + Map> toJson() => { + 'DepositsClaimed': { + 'multisigAddress': multisigAddress.toList(), + 'claimer': claimer.toList(), + 'totalReturned': totalReturned, + 'proposalsRemoved': proposalsRemoved, + 'multisigRemoved': multisigRemoved, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + const _i3.AccountId32Codec().sizeHint(claimer); + size = size + _i1.U128Codec.codec.sizeHint(totalReturned); + size = size + _i1.U32Codec.codec.sizeHint(proposalsRemoved); + size = size + _i1.BoolCodec.codec.sizeHint(multisigRemoved); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + const _i1.U8ArrayCodec(32).encodeTo(claimer, output); + _i1.U128Codec.codec.encodeTo(totalReturned, output); + _i1.U32Codec.codec.encodeTo(proposalsRemoved, output); + _i1.BoolCodec.codec.encodeTo(multisigRemoved, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DepositsClaimed && + _i5.listsEqual(other.multisigAddress, multisigAddress) && + _i5.listsEqual(other.claimer, claimer) && + 
other.totalReturned == totalReturned && + other.proposalsRemoved == proposalsRemoved && + other.multisigRemoved == multisigRemoved; + + @override + int get hashCode => Object.hash(multisigAddress, claimer, totalReturned, proposalsRemoved, multisigRemoved); +} + +/// A signer approved dissolving the multisig +class DissolveApproved extends Event { + const DissolveApproved({required this.multisigAddress, required this.approver, required this.approvalsCount}); + + factory DissolveApproved._decode(_i1.Input input) { + return DissolveApproved( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + approver: const _i1.U8ArrayCodec(32).decode(input), + approvalsCount: _i1.U32Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// T::AccountId + final _i3.AccountId32 approver; + + /// u32 + final int approvalsCount; + + @override + Map> toJson() => { + 'DissolveApproved': { + 'multisigAddress': multisigAddress.toList(), + 'approver': approver.toList(), + 'approvalsCount': approvalsCount, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + const _i3.AccountId32Codec().sizeHint(approver); + size = size + _i1.U32Codec.codec.sizeHint(approvalsCount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + const _i1.U8ArrayCodec(32).encodeTo(approver, output); + _i1.U32Codec.codec.encodeTo(approvalsCount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DissolveApproved && + _i5.listsEqual(other.multisigAddress, multisigAddress) && + _i5.listsEqual(other.approver, approver) && + other.approvalsCount == approvalsCount; + + @override + int get hashCode => Object.hash(multisigAddress, approver, approvalsCount); +} + +/// A multisig account was dissolved (threshold reached) +class 
MultisigDissolved extends Event { + const MultisigDissolved({required this.multisigAddress, required this.depositReturned, required this.approvers}); + + factory MultisigDissolved._decode(_i1.Input input) { + return MultisigDissolved( + multisigAddress: const _i1.U8ArrayCodec(32).decode(input), + depositReturned: const _i1.U8ArrayCodec(32).decode(input), + approvers: const _i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 multisigAddress; + + /// T::AccountId + final _i3.AccountId32 depositReturned; + + /// Vec + final List<_i3.AccountId32> approvers; + + @override + Map>> toJson() => { + 'MultisigDissolved': { + 'multisigAddress': multisigAddress.toList(), + 'depositReturned': depositReturned.toList(), + 'approvers': approvers.map((value) => value.toList()).toList(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(multisigAddress); + size = size + const _i3.AccountId32Codec().sizeHint(depositReturned); + size = size + const _i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).sizeHint(approvers); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + const _i1.U8ArrayCodec(32).encodeTo(multisigAddress, output); + const _i1.U8ArrayCodec(32).encodeTo(depositReturned, output); + const _i1.SequenceCodec<_i3.AccountId32>(_i3.AccountId32Codec()).encodeTo(approvers, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is MultisigDissolved && + _i5.listsEqual(other.multisigAddress, multisigAddress) && + _i5.listsEqual(other.depositReturned, depositReturned) && + _i5.listsEqual(other.approvers, approvers); + + @override + int get hashCode => Object.hash(multisigAddress, depositReturned, approvers); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_multisig/proposal_data.dart 
b/quantus_sdk/lib/generated/planck/types/pallet_multisig/proposal_data.dart new file mode 100644 index 00000000..13274bad --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_multisig/proposal_data.dart @@ -0,0 +1,108 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i4; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import '../sp_core/crypto/account_id32.dart' as _i2; +import 'proposal_status.dart' as _i3; + +class ProposalData { + const ProposalData({ + required this.proposer, + required this.call, + required this.expiry, + required this.approvals, + required this.deposit, + required this.status, + }); + + factory ProposalData.decode(_i1.Input input) { + return codec.decode(input); + } + + /// AccountId + final _i2.AccountId32 proposer; + + /// BoundedCall + final List call; + + /// BlockNumber + final int expiry; + + /// BoundedApprovals + final List<_i2.AccountId32> approvals; + + /// Balance + final BigInt deposit; + + /// ProposalStatus + final _i3.ProposalStatus status; + + static const $ProposalDataCodec codec = $ProposalDataCodec(); + + _i4.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'proposer': proposer.toList(), + 'call': call, + 'expiry': expiry, + 'approvals': approvals.map((value) => value.toList()).toList(), + 'deposit': deposit, + 'status': status.toJson(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ProposalData && + _i5.listsEqual(other.proposer, proposer) && + _i5.listsEqual(other.call, call) && + other.expiry == expiry && + _i5.listsEqual(other.approvals, approvals) && + other.deposit == deposit && + other.status == status; + + @override + int get hashCode => Object.hash(proposer, call, expiry, approvals, deposit, status); +} + +class $ProposalDataCodec with _i1.Codec { + const $ProposalDataCodec(); + + @override + void encodeTo(ProposalData 
obj, _i1.Output output) { + const _i1.U8ArrayCodec(32).encodeTo(obj.proposer, output); + _i1.U8SequenceCodec.codec.encodeTo(obj.call, output); + _i1.U32Codec.codec.encodeTo(obj.expiry, output); + const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).encodeTo(obj.approvals, output); + _i1.U128Codec.codec.encodeTo(obj.deposit, output); + _i3.ProposalStatus.codec.encodeTo(obj.status, output); + } + + @override + ProposalData decode(_i1.Input input) { + return ProposalData( + proposer: const _i1.U8ArrayCodec(32).decode(input), + call: _i1.U8SequenceCodec.codec.decode(input), + expiry: _i1.U32Codec.codec.decode(input), + approvals: const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).decode(input), + deposit: _i1.U128Codec.codec.decode(input), + status: _i3.ProposalStatus.codec.decode(input), + ); + } + + @override + int sizeHint(ProposalData obj) { + int size = 0; + size = size + const _i2.AccountId32Codec().sizeHint(obj.proposer); + size = size + _i1.U8SequenceCodec.codec.sizeHint(obj.call); + size = size + _i1.U32Codec.codec.sizeHint(obj.expiry); + size = size + const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).sizeHint(obj.approvals); + size = size + _i1.U128Codec.codec.sizeHint(obj.deposit); + size = size + _i3.ProposalStatus.codec.sizeHint(obj.status); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_multisig/proposal_status.dart b/quantus_sdk/lib/generated/planck/types/pallet_multisig/proposal_status.dart new file mode 100644 index 00000000..53c02569 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_multisig/proposal_status.dart @@ -0,0 +1,55 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum ProposalStatus { + active('Active', 0), + approved('Approved', 1), + executed('Executed', 2), + cancelled('Cancelled', 3); + + const ProposalStatus(this.variantName, 
this.codecIndex); + + factory ProposalStatus.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ProposalStatusCodec codec = $ProposalStatusCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ProposalStatusCodec with _i1.Codec { + const $ProposalStatusCodec(); + + @override + ProposalStatus decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return ProposalStatus.active; + case 1: + return ProposalStatus.approved; + case 2: + return ProposalStatus.executed; + case 3: + return ProposalStatus.cancelled; + default: + throw Exception('ProposalStatus: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(ProposalStatus value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_preimage/old_request_status.dart b/quantus_sdk/lib/generated/planck/types/pallet_preimage/old_request_status.dart new file mode 100644 index 00000000..69303e64 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_preimage/old_request_status.dart @@ -0,0 +1,200 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../sp_core/crypto/account_id32.dart' as _i4; +import '../tuples.dart' as _i3; + +abstract class OldRequestStatus { + const OldRequestStatus(); + + factory OldRequestStatus.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $OldRequestStatusCodec codec = $OldRequestStatusCodec(); + + static const $OldRequestStatus values = $OldRequestStatus(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return 
codec.sizeHint(this); + } + + Map> toJson(); +} + +class $OldRequestStatus { + const $OldRequestStatus(); + + Unrequested unrequested({required _i3.Tuple2<_i4.AccountId32, BigInt> deposit, required int len}) { + return Unrequested(deposit: deposit, len: len); + } + + Requested requested({_i3.Tuple2<_i4.AccountId32, BigInt>? deposit, required int count, int? len}) { + return Requested(deposit: deposit, count: count, len: len); + } +} + +class $OldRequestStatusCodec with _i1.Codec { + const $OldRequestStatusCodec(); + + @override + OldRequestStatus decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Unrequested._decode(input); + case 1: + return Requested._decode(input); + default: + throw Exception('OldRequestStatus: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(OldRequestStatus value, _i1.Output output) { + switch (value.runtimeType) { + case Unrequested: + (value as Unrequested).encodeTo(output); + break; + case Requested: + (value as Requested).encodeTo(output); + break; + default: + throw Exception('OldRequestStatus: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(OldRequestStatus value) { + switch (value.runtimeType) { + case Unrequested: + return (value as Unrequested)._sizeHint(); + case Requested: + return (value as Requested)._sizeHint(); + default: + throw Exception('OldRequestStatus: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Unrequested extends OldRequestStatus { + const Unrequested({required this.deposit, required this.len}); + + factory Unrequested._decode(_i1.Input input) { + return Unrequested( + deposit: const _i3.Tuple2Codec<_i4.AccountId32, BigInt>( + _i4.AccountId32Codec(), + _i1.U128Codec.codec, + ).decode(input), + len: _i1.U32Codec.codec.decode(input), + ); + } + + /// (AccountId, Balance) + final _i3.Tuple2<_i4.AccountId32, BigInt> deposit; + + /// u32 + final int len; + + 
@override + Map> toJson() => { + 'Unrequested': { + 'deposit': [deposit.value0.toList(), deposit.value1], + 'len': len, + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i3.Tuple2Codec<_i4.AccountId32, BigInt>(_i4.AccountId32Codec(), _i1.U128Codec.codec).sizeHint(deposit); + size = size + _i1.U32Codec.codec.sizeHint(len); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i3.Tuple2Codec<_i4.AccountId32, BigInt>( + _i4.AccountId32Codec(), + _i1.U128Codec.codec, + ).encodeTo(deposit, output); + _i1.U32Codec.codec.encodeTo(len, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Unrequested && other.deposit == deposit && other.len == len; + + @override + int get hashCode => Object.hash(deposit, len); +} + +class Requested extends OldRequestStatus { + const Requested({this.deposit, required this.count, this.len}); + + factory Requested._decode(_i1.Input input) { + return Requested( + deposit: const _i1.OptionCodec<_i3.Tuple2<_i4.AccountId32, BigInt>>( + _i3.Tuple2Codec<_i4.AccountId32, BigInt>(_i4.AccountId32Codec(), _i1.U128Codec.codec), + ).decode(input), + count: _i1.U32Codec.codec.decode(input), + len: const _i1.OptionCodec(_i1.U32Codec.codec).decode(input), + ); + } + + /// Option<(AccountId, Balance)> + final _i3.Tuple2<_i4.AccountId32, BigInt>? deposit; + + /// u32 + final int count; + + /// Option + final int? 
len; + + @override + Map> toJson() => { + 'Requested': { + 'deposit': [deposit?.value0.toList(), deposit?.value1], + 'count': count, + 'len': len, + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i1.OptionCodec<_i3.Tuple2<_i4.AccountId32, BigInt>>( + _i3.Tuple2Codec<_i4.AccountId32, BigInt>(_i4.AccountId32Codec(), _i1.U128Codec.codec), + ).sizeHint(deposit); + size = size + _i1.U32Codec.codec.sizeHint(count); + size = size + const _i1.OptionCodec(_i1.U32Codec.codec).sizeHint(len); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.OptionCodec<_i3.Tuple2<_i4.AccountId32, BigInt>>( + _i3.Tuple2Codec<_i4.AccountId32, BigInt>(_i4.AccountId32Codec(), _i1.U128Codec.codec), + ).encodeTo(deposit, output); + _i1.U32Codec.codec.encodeTo(count, output); + const _i1.OptionCodec(_i1.U32Codec.codec).encodeTo(len, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Requested && other.deposit == deposit && other.count == count && other.len == len; + + @override + int get hashCode => Object.hash(deposit, count, len); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/call.dart new file mode 100644 index 00000000..246d31d2 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/call.dart @@ -0,0 +1,310 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../primitive_types/h256.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. 
+abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map>> toJson(); +} + +class $Call { + const $Call(); + + NotePreimage notePreimage({required List bytes}) { + return NotePreimage(bytes: bytes); + } + + UnnotePreimage unnotePreimage({required _i3.H256 hash}) { + return UnnotePreimage(hash: hash); + } + + RequestPreimage requestPreimage({required _i3.H256 hash}) { + return RequestPreimage(hash: hash); + } + + UnrequestPreimage unrequestPreimage({required _i3.H256 hash}) { + return UnrequestPreimage(hash: hash); + } + + EnsureUpdated ensureUpdated({required List<_i3.H256> hashes}) { + return EnsureUpdated(hashes: hashes); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return NotePreimage._decode(input); + case 1: + return UnnotePreimage._decode(input); + case 2: + return RequestPreimage._decode(input); + case 3: + return UnrequestPreimage._decode(input); + case 4: + return EnsureUpdated._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case NotePreimage: + (value as NotePreimage).encodeTo(output); + break; + case UnnotePreimage: + (value as UnnotePreimage).encodeTo(output); + break; + case RequestPreimage: + (value as RequestPreimage).encodeTo(output); + break; + case UnrequestPreimage: + (value as UnrequestPreimage).encodeTo(output); + break; + case EnsureUpdated: + (value as EnsureUpdated).encodeTo(output); + break; + default: 
+ throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case NotePreimage: + return (value as NotePreimage)._sizeHint(); + case UnnotePreimage: + return (value as UnnotePreimage)._sizeHint(); + case RequestPreimage: + return (value as RequestPreimage)._sizeHint(); + case UnrequestPreimage: + return (value as UnrequestPreimage)._sizeHint(); + case EnsureUpdated: + return (value as EnsureUpdated)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Register a preimage on-chain. +/// +/// If the preimage was previously requested, no fees or deposits are taken for providing +/// the preimage. Otherwise, a deposit is taken proportional to the size of the preimage. +class NotePreimage extends Call { + const NotePreimage({required this.bytes}); + + factory NotePreimage._decode(_i1.Input input) { + return NotePreimage(bytes: _i1.U8SequenceCodec.codec.decode(input)); + } + + /// Vec + final List bytes; + + @override + Map>> toJson() => { + 'note_preimage': {'bytes': bytes}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8SequenceCodec.codec.sizeHint(bytes); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U8SequenceCodec.codec.encodeTo(bytes, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is NotePreimage && _i4.listsEqual(other.bytes, bytes); + + @override + int get hashCode => bytes.hashCode; +} + +/// Clear an unrequested preimage from the runtime storage. +/// +/// If `len` is provided, then it will be a much cheaper operation. +/// +/// - `hash`: The hash of the preimage to be removed from the store. +/// - `len`: The length of the preimage of `hash`. 
+class UnnotePreimage extends Call { + const UnnotePreimage({required this.hash}); + + factory UnnotePreimage._decode(_i1.Input input) { + return UnnotePreimage(hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::Hash + final _i3.H256 hash; + + @override + Map>> toJson() => { + 'unnote_preimage': {'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is UnnotePreimage && _i4.listsEqual(other.hash, hash); + + @override + int get hashCode => hash.hashCode; +} + +/// Request a preimage be uploaded to the chain without paying any fees or deposits. +/// +/// If the preimage requests has already been provided on-chain, we unreserve any deposit +/// a user may have paid, and take the control of the preimage out of their hands. +class RequestPreimage extends Call { + const RequestPreimage({required this.hash}); + + factory RequestPreimage._decode(_i1.Input input) { + return RequestPreimage(hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::Hash + final _i3.H256 hash; + + @override + Map>> toJson() => { + 'request_preimage': {'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is RequestPreimage && _i4.listsEqual(other.hash, hash); + + @override + int get hashCode => hash.hashCode; +} + +/// Clear a previously made request for a preimage. +/// +/// NOTE: THIS MUST NOT BE CALLED ON `hash` MORE TIMES THAN `request_preimage`. 
+class UnrequestPreimage extends Call { + const UnrequestPreimage({required this.hash}); + + factory UnrequestPreimage._decode(_i1.Input input) { + return UnrequestPreimage(hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::Hash + final _i3.H256 hash; + + @override + Map>> toJson() => { + 'unrequest_preimage': {'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is UnrequestPreimage && _i4.listsEqual(other.hash, hash); + + @override + int get hashCode => hash.hashCode; +} + +/// Ensure that the bulk of pre-images is upgraded. +/// +/// The caller pays no fee if at least 90% of pre-images were successfully updated. +class EnsureUpdated extends Call { + const EnsureUpdated({required this.hashes}); + + factory EnsureUpdated._decode(_i1.Input input) { + return EnsureUpdated(hashes: const _i1.SequenceCodec<_i3.H256>(_i3.H256Codec()).decode(input)); + } + + /// Vec + final List<_i3.H256> hashes; + + @override + Map>>> toJson() => { + 'ensure_updated': {'hashes': hashes.map((value) => value.toList()).toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.SequenceCodec<_i3.H256>(_i3.H256Codec()).sizeHint(hashes); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.SequenceCodec<_i3.H256>(_i3.H256Codec()).encodeTo(hashes, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is EnsureUpdated && _i4.listsEqual(other.hashes, hashes); + + @override + int get hashCode => hashes.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/error.dart 
b/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/error.dart new file mode 100644 index 00000000..74a1a50f --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/error.dart @@ -0,0 +1,83 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + /// Preimage is too large to store on-chain. + tooBig('TooBig', 0), + + /// Preimage has already been noted on-chain. + alreadyNoted('AlreadyNoted', 1), + + /// The user is not authorized to perform this action. + notAuthorized('NotAuthorized', 2), + + /// The preimage cannot be removed since it has not yet been noted. + notNoted('NotNoted', 3), + + /// A preimage may not be removed when there are outstanding requests. + requested('Requested', 4), + + /// The preimage request cannot be removed since no outstanding requests exist. + notRequested('NotRequested', 5), + + /// More than `MAX_HASH_UPGRADE_BULK_COUNT` hashes were requested to be upgraded at once. + tooMany('TooMany', 6), + + /// Too few hashes were requested to be upgraded (i.e. zero). 
+ tooFew('TooFew', 7); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.tooBig; + case 1: + return Error.alreadyNoted; + case 2: + return Error.notAuthorized; + case 3: + return Error.notNoted; + case 4: + return Error.requested; + case 5: + return Error.notRequested; + case 6: + return Error.tooMany; + case 7: + return Error.tooFew; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/event.dart new file mode 100644 index 00000000..26f10fba --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/event.dart @@ -0,0 +1,200 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../primitive_types/h256.dart' as _i3; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return 
output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map>> toJson(); +} + +class $Event { + const $Event(); + + Noted noted({required _i3.H256 hash}) { + return Noted(hash: hash); + } + + Requested requested({required _i3.H256 hash}) { + return Requested(hash: hash); + } + + Cleared cleared({required _i3.H256 hash}) { + return Cleared(hash: hash); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Noted._decode(input); + case 1: + return Requested._decode(input); + case 2: + return Cleared._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case Noted: + (value as Noted).encodeTo(output); + break; + case Requested: + (value as Requested).encodeTo(output); + break; + case Cleared: + (value as Cleared).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case Noted: + return (value as Noted)._sizeHint(); + case Requested: + return (value as Requested)._sizeHint(); + case Cleared: + return (value as Cleared)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// A preimage has been noted. 
+class Noted extends Event { + const Noted({required this.hash}); + + factory Noted._decode(_i1.Input input) { + return Noted(hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::Hash + final _i3.H256 hash; + + @override + Map>> toJson() => { + 'Noted': {'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Noted && _i4.listsEqual(other.hash, hash); + + @override + int get hashCode => hash.hashCode; +} + +/// A preimage has been requested. +class Requested extends Event { + const Requested({required this.hash}); + + factory Requested._decode(_i1.Input input) { + return Requested(hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::Hash + final _i3.H256 hash; + + @override + Map>> toJson() => { + 'Requested': {'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Requested && _i4.listsEqual(other.hash, hash); + + @override + int get hashCode => hash.hashCode; +} + +/// A preimage has ben cleared. 
+class Cleared extends Event { + const Cleared({required this.hash}); + + factory Cleared._decode(_i1.Input input) { + return Cleared(hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::Hash + final _i3.H256 hash; + + @override + Map>> toJson() => { + 'Cleared': {'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Cleared && _i4.listsEqual(other.hash, hash); + + @override + int get hashCode => hash.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/hold_reason.dart b/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/hold_reason.dart new file mode 100644 index 00000000..6bcca1d8 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_preimage/pallet/hold_reason.dart @@ -0,0 +1,46 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum HoldReason { + preimage('Preimage', 0); + + const HoldReason(this.variantName, this.codecIndex); + + factory HoldReason.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $HoldReasonCodec codec = $HoldReasonCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $HoldReasonCodec with _i1.Codec { + const $HoldReasonCodec(); + + @override + HoldReason decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return HoldReason.preimage; + default: + throw Exception('HoldReason: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(HoldReason value, 
_i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_preimage/request_status.dart b/quantus_sdk/lib/generated/planck/types/pallet_preimage/request_status.dart new file mode 100644 index 00000000..c3a46ad8 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_preimage/request_status.dart @@ -0,0 +1,208 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../quantus_runtime/governance/definitions/preimage_deposit.dart' as _i5; +import '../sp_core/crypto/account_id32.dart' as _i4; +import '../tuples.dart' as _i3; + +abstract class RequestStatus { + const RequestStatus(); + + factory RequestStatus.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $RequestStatusCodec codec = $RequestStatusCodec(); + + static const $RequestStatus values = $RequestStatus(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $RequestStatus { + const $RequestStatus(); + + Unrequested unrequested({required _i3.Tuple2<_i4.AccountId32, _i5.PreimageDeposit> ticket, required int len}) { + return Unrequested(ticket: ticket, len: len); + } + + Requested requested({ + _i3.Tuple2<_i4.AccountId32, _i5.PreimageDeposit>? maybeTicket, + required int count, + int? 
maybeLen, + }) { + return Requested(maybeTicket: maybeTicket, count: count, maybeLen: maybeLen); + } +} + +class $RequestStatusCodec with _i1.Codec { + const $RequestStatusCodec(); + + @override + RequestStatus decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Unrequested._decode(input); + case 1: + return Requested._decode(input); + default: + throw Exception('RequestStatus: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(RequestStatus value, _i1.Output output) { + switch (value.runtimeType) { + case Unrequested: + (value as Unrequested).encodeTo(output); + break; + case Requested: + (value as Requested).encodeTo(output); + break; + default: + throw Exception('RequestStatus: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(RequestStatus value) { + switch (value.runtimeType) { + case Unrequested: + return (value as Unrequested)._sizeHint(); + case Requested: + return (value as Requested)._sizeHint(); + default: + throw Exception('RequestStatus: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Unrequested extends RequestStatus { + const Unrequested({required this.ticket, required this.len}); + + factory Unrequested._decode(_i1.Input input) { + return Unrequested( + ticket: const _i3.Tuple2Codec<_i4.AccountId32, _i5.PreimageDeposit>( + _i4.AccountId32Codec(), + _i5.PreimageDeposit.codec, + ).decode(input), + len: _i1.U32Codec.codec.decode(input), + ); + } + + /// (AccountId, Ticket) + final _i3.Tuple2<_i4.AccountId32, _i5.PreimageDeposit> ticket; + + /// u32 + final int len; + + @override + Map> toJson() => { + 'Unrequested': { + 'ticket': [ticket.value0.toList(), ticket.value1.toJson()], + 'len': len, + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i3.Tuple2Codec<_i4.AccountId32, _i5.PreimageDeposit>( + _i4.AccountId32Codec(), + _i5.PreimageDeposit.codec, + ).sizeHint(ticket); + size = 
size + _i1.U32Codec.codec.sizeHint(len); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i3.Tuple2Codec<_i4.AccountId32, _i5.PreimageDeposit>( + _i4.AccountId32Codec(), + _i5.PreimageDeposit.codec, + ).encodeTo(ticket, output); + _i1.U32Codec.codec.encodeTo(len, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Unrequested && other.ticket == ticket && other.len == len; + + @override + int get hashCode => Object.hash(ticket, len); +} + +class Requested extends RequestStatus { + const Requested({this.maybeTicket, required this.count, this.maybeLen}); + + factory Requested._decode(_i1.Input input) { + return Requested( + maybeTicket: const _i1.OptionCodec<_i3.Tuple2<_i4.AccountId32, _i5.PreimageDeposit>>( + _i3.Tuple2Codec<_i4.AccountId32, _i5.PreimageDeposit>(_i4.AccountId32Codec(), _i5.PreimageDeposit.codec), + ).decode(input), + count: _i1.U32Codec.codec.decode(input), + maybeLen: const _i1.OptionCodec(_i1.U32Codec.codec).decode(input), + ); + } + + /// Option<(AccountId, Ticket)> + final _i3.Tuple2<_i4.AccountId32, _i5.PreimageDeposit>? maybeTicket; + + /// u32 + final int count; + + /// Option + final int? 
maybeLen; + + @override + Map> toJson() => { + 'Requested': { + 'maybeTicket': [maybeTicket?.value0.toList(), maybeTicket?.value1.toJson()], + 'count': count, + 'maybeLen': maybeLen, + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i1.OptionCodec<_i3.Tuple2<_i4.AccountId32, _i5.PreimageDeposit>>( + _i3.Tuple2Codec<_i4.AccountId32, _i5.PreimageDeposit>(_i4.AccountId32Codec(), _i5.PreimageDeposit.codec), + ).sizeHint(maybeTicket); + size = size + _i1.U32Codec.codec.sizeHint(count); + size = size + const _i1.OptionCodec(_i1.U32Codec.codec).sizeHint(maybeLen); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.OptionCodec<_i3.Tuple2<_i4.AccountId32, _i5.PreimageDeposit>>( + _i3.Tuple2Codec<_i4.AccountId32, _i5.PreimageDeposit>(_i4.AccountId32Codec(), _i5.PreimageDeposit.codec), + ).encodeTo(maybeTicket, output); + _i1.U32Codec.codec.encodeTo(count, output); + const _i1.OptionCodec(_i1.U32Codec.codec).encodeTo(maybeLen, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Requested && other.maybeTicket == maybeTicket && other.count == count && other.maybeLen == maybeLen; + + @override + int get hashCode => Object.hash(maybeTicket, count, maybeLen); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_qpow/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_qpow/pallet/event.dart new file mode 100644 index 00000000..e35e03cc --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_qpow/pallet/event.dart @@ -0,0 +1,211 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../primitive_types/u512.dart' as _i3; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return 
codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + ProofSubmitted proofSubmitted({ + required List nonce, + required _i3.U512 difficulty, + required _i3.U512 hashAchieved, + }) { + return ProofSubmitted(nonce: nonce, difficulty: difficulty, hashAchieved: hashAchieved); + } + + DifficultyAdjusted difficultyAdjusted({ + required _i3.U512 oldDifficulty, + required _i3.U512 newDifficulty, + required BigInt observedBlockTime, + }) { + return DifficultyAdjusted( + oldDifficulty: oldDifficulty, + newDifficulty: newDifficulty, + observedBlockTime: observedBlockTime, + ); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return ProofSubmitted._decode(input); + case 1: + return DifficultyAdjusted._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case ProofSubmitted: + (value as ProofSubmitted).encodeTo(output); + break; + case DifficultyAdjusted: + (value as DifficultyAdjusted).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case ProofSubmitted: + return (value as ProofSubmitted)._sizeHint(); + case DifficultyAdjusted: + return (value as DifficultyAdjusted)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class ProofSubmitted extends Event { 
+ const ProofSubmitted({required this.nonce, required this.difficulty, required this.hashAchieved}); + + factory ProofSubmitted._decode(_i1.Input input) { + return ProofSubmitted( + nonce: const _i1.U8ArrayCodec(64).decode(input), + difficulty: const _i1.U64ArrayCodec(8).decode(input), + hashAchieved: const _i1.U64ArrayCodec(8).decode(input), + ); + } + + /// NonceType + final List nonce; + + /// U512 + final _i3.U512 difficulty; + + /// U512 + final _i3.U512 hashAchieved; + + @override + Map>> toJson() => { + 'ProofSubmitted': { + 'nonce': nonce.toList(), + 'difficulty': difficulty.toList(), + 'hashAchieved': hashAchieved.toList(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.U8ArrayCodec(64).sizeHint(nonce); + size = size + const _i3.U512Codec().sizeHint(difficulty); + size = size + const _i3.U512Codec().sizeHint(hashAchieved); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(64).encodeTo(nonce, output); + const _i1.U64ArrayCodec(8).encodeTo(difficulty, output); + const _i1.U64ArrayCodec(8).encodeTo(hashAchieved, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ProofSubmitted && + _i4.listsEqual(other.nonce, nonce) && + _i4.listsEqual(other.difficulty, difficulty) && + _i4.listsEqual(other.hashAchieved, hashAchieved); + + @override + int get hashCode => Object.hash(nonce, difficulty, hashAchieved); +} + +class DifficultyAdjusted extends Event { + const DifficultyAdjusted({required this.oldDifficulty, required this.newDifficulty, required this.observedBlockTime}); + + factory DifficultyAdjusted._decode(_i1.Input input) { + return DifficultyAdjusted( + oldDifficulty: const _i1.U64ArrayCodec(8).decode(input), + newDifficulty: const _i1.U64ArrayCodec(8).decode(input), + observedBlockTime: _i1.U64Codec.codec.decode(input), + ); + } + + /// Difficulty + final _i3.U512 oldDifficulty; + + /// Difficulty + final 
_i3.U512 newDifficulty; + + /// BlockDuration + final BigInt observedBlockTime; + + @override + Map> toJson() => { + 'DifficultyAdjusted': { + 'oldDifficulty': oldDifficulty.toList(), + 'newDifficulty': newDifficulty.toList(), + 'observedBlockTime': observedBlockTime, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.U512Codec().sizeHint(oldDifficulty); + size = size + const _i3.U512Codec().sizeHint(newDifficulty); + size = size + _i1.U64Codec.codec.sizeHint(observedBlockTime); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U64ArrayCodec(8).encodeTo(oldDifficulty, output); + const _i1.U64ArrayCodec(8).encodeTo(newDifficulty, output); + _i1.U64Codec.codec.encodeTo(observedBlockTime, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DifficultyAdjusted && + _i4.listsEqual(other.oldDifficulty, oldDifficulty) && + _i4.listsEqual(other.newDifficulty, newDifficulty) && + other.observedBlockTime == observedBlockTime; + + @override + int get hashCode => Object.hash(oldDifficulty, newDifficulty, observedBlockTime); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/member_record.dart b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/member_record.dart new file mode 100644 index 00000000..bbcabb46 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/member_record.dart @@ -0,0 +1,50 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +class MemberRecord { + const MemberRecord({required this.rank}); + + factory MemberRecord.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Rank + final int rank; + + static const $MemberRecordCodec codec = $MemberRecordCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => 
{'rank': rank}; + + @override + bool operator ==(Object other) => identical(this, other) || other is MemberRecord && other.rank == rank; + + @override + int get hashCode => rank.hashCode; +} + +class $MemberRecordCodec with _i1.Codec { + const $MemberRecordCodec(); + + @override + void encodeTo(MemberRecord obj, _i1.Output output) { + _i1.U16Codec.codec.encodeTo(obj.rank, output); + } + + @override + MemberRecord decode(_i1.Input input) { + return MemberRecord(rank: _i1.U16Codec.codec.decode(input)); + } + + @override + int sizeHint(MemberRecord obj) { + int size = 0; + size = size + _i1.U16Codec.codec.sizeHint(obj.rank); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/pallet/call.dart new file mode 100644 index 00000000..430c043f --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/pallet/call.dart @@ -0,0 +1,447 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../sp_runtime/multiaddress/multi_address.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. 
+abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + AddMember addMember({required _i3.MultiAddress who}) { + return AddMember(who: who); + } + + PromoteMember promoteMember({required _i3.MultiAddress who}) { + return PromoteMember(who: who); + } + + DemoteMember demoteMember({required _i3.MultiAddress who}) { + return DemoteMember(who: who); + } + + RemoveMember removeMember({required _i3.MultiAddress who, required int minRank}) { + return RemoveMember(who: who, minRank: minRank); + } + + Vote vote({required int poll, required bool aye}) { + return Vote(poll: poll, aye: aye); + } + + CleanupPoll cleanupPoll({required int pollIndex, required int max}) { + return CleanupPoll(pollIndex: pollIndex, max: max); + } + + ExchangeMember exchangeMember({required _i3.MultiAddress who, required _i3.MultiAddress newWho}) { + return ExchangeMember(who: who, newWho: newWho); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return AddMember._decode(input); + case 1: + return PromoteMember._decode(input); + case 2: + return DemoteMember._decode(input); + case 3: + return RemoveMember._decode(input); + case 4: + return Vote._decode(input); + case 5: + return CleanupPoll._decode(input); + case 6: + return ExchangeMember._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case AddMember: + (value 
as AddMember).encodeTo(output); + break; + case PromoteMember: + (value as PromoteMember).encodeTo(output); + break; + case DemoteMember: + (value as DemoteMember).encodeTo(output); + break; + case RemoveMember: + (value as RemoveMember).encodeTo(output); + break; + case Vote: + (value as Vote).encodeTo(output); + break; + case CleanupPoll: + (value as CleanupPoll).encodeTo(output); + break; + case ExchangeMember: + (value as ExchangeMember).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case AddMember: + return (value as AddMember)._sizeHint(); + case PromoteMember: + return (value as PromoteMember)._sizeHint(); + case DemoteMember: + return (value as DemoteMember)._sizeHint(); + case RemoveMember: + return (value as RemoveMember)._sizeHint(); + case Vote: + return (value as Vote)._sizeHint(); + case CleanupPoll: + return (value as CleanupPoll)._sizeHint(); + case ExchangeMember: + return (value as ExchangeMember)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Introduce a new member. +/// +/// - `origin`: Must be the `AddOrigin`. +/// - `who`: Account of non-member which will become a member. 
+/// +/// Weight: `O(1)` +class AddMember extends Call { + const AddMember({required this.who}); + + factory AddMember._decode(_i1.Input input) { + return AddMember(who: _i3.MultiAddress.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress who; + + @override + Map>> toJson() => { + 'add_member': {'who': who.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.MultiAddress.codec.encodeTo(who, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is AddMember && other.who == who; + + @override + int get hashCode => who.hashCode; +} + +/// Increment the rank of an existing member by one. +/// +/// - `origin`: Must be the `PromoteOrigin`. +/// - `who`: Account of existing member. +/// +/// Weight: `O(1)` +class PromoteMember extends Call { + const PromoteMember({required this.who}); + + factory PromoteMember._decode(_i1.Input input) { + return PromoteMember(who: _i3.MultiAddress.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress who; + + @override + Map>> toJson() => { + 'promote_member': {'who': who.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i3.MultiAddress.codec.encodeTo(who, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is PromoteMember && other.who == who; + + @override + int get hashCode => who.hashCode; +} + +/// Decrement the rank of an existing member by one. If the member is already at rank zero, +/// then they are removed entirely. +/// +/// - `origin`: Must be the `DemoteOrigin`. +/// - `who`: Account of existing member of rank greater than zero. 
+/// +/// Weight: `O(1)`, less if the member's index is highest in its rank. +class DemoteMember extends Call { + const DemoteMember({required this.who}); + + factory DemoteMember._decode(_i1.Input input) { + return DemoteMember(who: _i3.MultiAddress.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress who; + + @override + Map>> toJson() => { + 'demote_member': {'who': who.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i3.MultiAddress.codec.encodeTo(who, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is DemoteMember && other.who == who; + + @override + int get hashCode => who.hashCode; +} + +/// Remove the member entirely. +/// +/// - `origin`: Must be the `RemoveOrigin`. +/// - `who`: Account of existing member of rank greater than zero. +/// - `min_rank`: The rank of the member or greater. +/// +/// Weight: `O(min_rank)`. 
+class RemoveMember extends Call { + const RemoveMember({required this.who, required this.minRank}); + + factory RemoveMember._decode(_i1.Input input) { + return RemoveMember(who: _i3.MultiAddress.codec.decode(input), minRank: _i1.U16Codec.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress who; + + /// Rank + final int minRank; + + @override + Map> toJson() => { + 'remove_member': {'who': who.toJson(), 'minRank': minRank}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(who); + size = size + _i1.U16Codec.codec.sizeHint(minRank); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i3.MultiAddress.codec.encodeTo(who, output); + _i1.U16Codec.codec.encodeTo(minRank, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is RemoveMember && other.who == who && other.minRank == minRank; + + @override + int get hashCode => Object.hash(who, minRank); +} + +/// Add an aye or nay vote for the sender to the given proposal. +/// +/// - `origin`: Must be `Signed` by a member account. +/// - `poll`: Index of a poll which is ongoing. +/// - `aye`: `true` if the vote is to approve the proposal, `false` otherwise. +/// +/// Transaction fees are be waived if the member is voting on any particular proposal +/// for the first time and the call is successful. Subsequent vote changes will charge a +/// fee. +/// +/// Weight: `O(1)`, less if there was no previous vote on the poll by the member. 
+class Vote extends Call { + const Vote({required this.poll, required this.aye}); + + factory Vote._decode(_i1.Input input) { + return Vote(poll: _i1.U32Codec.codec.decode(input), aye: _i1.BoolCodec.codec.decode(input)); + } + + /// PollIndexOf + final int poll; + + /// bool + final bool aye; + + @override + Map> toJson() => { + 'vote': {'poll': poll, 'aye': aye}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(poll); + size = size + _i1.BoolCodec.codec.sizeHint(aye); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i1.U32Codec.codec.encodeTo(poll, output); + _i1.BoolCodec.codec.encodeTo(aye, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Vote && other.poll == poll && other.aye == aye; + + @override + int get hashCode => Object.hash(poll, aye); +} + +/// Remove votes from the given poll. It must have ended. +/// +/// - `origin`: Must be `Signed` by any account. +/// - `poll_index`: Index of a poll which is completed and for which votes continue to +/// exist. +/// - `max`: Maximum number of vote items from remove in this call. +/// +/// Transaction fees are waived if the operation is successful. +/// +/// Weight `O(max)` (less if there are fewer items to remove than `max`). 
+class CleanupPoll extends Call { + const CleanupPoll({required this.pollIndex, required this.max}); + + factory CleanupPoll._decode(_i1.Input input) { + return CleanupPoll(pollIndex: _i1.U32Codec.codec.decode(input), max: _i1.U32Codec.codec.decode(input)); + } + + /// PollIndexOf + final int pollIndex; + + /// u32 + final int max; + + @override + Map> toJson() => { + 'cleanup_poll': {'pollIndex': pollIndex, 'max': max}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(pollIndex); + size = size + _i1.U32Codec.codec.sizeHint(max); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i1.U32Codec.codec.encodeTo(pollIndex, output); + _i1.U32Codec.codec.encodeTo(max, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is CleanupPoll && other.pollIndex == pollIndex && other.max == max; + + @override + int get hashCode => Object.hash(pollIndex, max); +} + +/// Exchanges a member with a new account and the same existing rank. +/// +/// - `origin`: Must be the `ExchangeOrigin`. +/// - `who`: Account of existing member of rank greater than zero to be exchanged. +/// - `new_who`: New Account of existing member of rank greater than zero to exchanged to. 
+class ExchangeMember extends Call { + const ExchangeMember({required this.who, required this.newWho}); + + factory ExchangeMember._decode(_i1.Input input) { + return ExchangeMember(who: _i3.MultiAddress.codec.decode(input), newWho: _i3.MultiAddress.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress who; + + /// AccountIdLookupOf + final _i3.MultiAddress newWho; + + @override + Map>> toJson() => { + 'exchange_member': {'who': who.toJson(), 'newWho': newWho.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(who); + size = size + _i3.MultiAddress.codec.sizeHint(newWho); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i3.MultiAddress.codec.encodeTo(who, output); + _i3.MultiAddress.codec.encodeTo(newWho, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is ExchangeMember && other.who == who && other.newWho == newWho; + + @override + int get hashCode => Object.hash(who, newWho); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/pallet/error.dart new file mode 100644 index 00000000..a0939c33 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/pallet/error.dart @@ -0,0 +1,98 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + /// Account is already a member. + alreadyMember('AlreadyMember', 0), + + /// Account is not a member. + notMember('NotMember', 1), + + /// The given poll index is unknown or has closed. + notPolling('NotPolling', 2), + + /// The given poll is still ongoing. + ongoing('Ongoing', 3), + + /// There are no further records to be removed. 
+ noneRemaining('NoneRemaining', 4), + + /// Unexpected error in state. + corruption('Corruption', 5), + + /// The member's rank is too low to vote. + rankTooLow('RankTooLow', 6), + + /// The information provided is incorrect. + invalidWitness('InvalidWitness', 7), + + /// The origin is not sufficiently privileged to do the operation. + noPermission('NoPermission', 8), + + /// The new member to exchange is the same as the old member + sameMember('SameMember', 9), + + /// The max member count for the rank has been reached. + tooManyMembers('TooManyMembers', 10); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.alreadyMember; + case 1: + return Error.notMember; + case 2: + return Error.notPolling; + case 3: + return Error.ongoing; + case 4: + return Error.noneRemaining; + case 5: + return Error.corruption; + case 6: + return Error.rankTooLow; + case 7: + return Error.invalidWitness; + case 8: + return Error.noPermission; + case 9: + return Error.sameMember; + case 10: + return Error.tooManyMembers; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/pallet/event.dart new file mode 100644 index 00000000..380795e3 --- /dev/null +++ 
b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/pallet/event.dart @@ -0,0 +1,346 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i6; + +import '../../sp_core/crypto/account_id32.dart' as _i3; +import '../tally.dart' as _i5; +import '../vote_record.dart' as _i4; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + MemberAdded memberAdded({required _i3.AccountId32 who}) { + return MemberAdded(who: who); + } + + RankChanged rankChanged({required _i3.AccountId32 who, required int rank}) { + return RankChanged(who: who, rank: rank); + } + + MemberRemoved memberRemoved({required _i3.AccountId32 who, required int rank}) { + return MemberRemoved(who: who, rank: rank); + } + + Voted voted({ + required _i3.AccountId32 who, + required int poll, + required _i4.VoteRecord vote, + required _i5.Tally tally, + }) { + return Voted(who: who, poll: poll, vote: vote, tally: tally); + } + + MemberExchanged memberExchanged({required _i3.AccountId32 who, required _i3.AccountId32 newWho}) { + return MemberExchanged(who: who, newWho: newWho); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return MemberAdded._decode(input); + case 1: + return RankChanged._decode(input); + case 2: + return MemberRemoved._decode(input); + 
case 3: + return Voted._decode(input); + case 4: + return MemberExchanged._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case MemberAdded: + (value as MemberAdded).encodeTo(output); + break; + case RankChanged: + (value as RankChanged).encodeTo(output); + break; + case MemberRemoved: + (value as MemberRemoved).encodeTo(output); + break; + case Voted: + (value as Voted).encodeTo(output); + break; + case MemberExchanged: + (value as MemberExchanged).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case MemberAdded: + return (value as MemberAdded)._sizeHint(); + case RankChanged: + return (value as RankChanged)._sizeHint(); + case MemberRemoved: + return (value as MemberRemoved)._sizeHint(); + case Voted: + return (value as Voted)._sizeHint(); + case MemberExchanged: + return (value as MemberExchanged)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// A member `who` has been added. 
+class MemberAdded extends Event { + const MemberAdded({required this.who}); + + factory MemberAdded._decode(_i1.Input input) { + return MemberAdded(who: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + @override + Map>> toJson() => { + 'MemberAdded': {'who': who.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is MemberAdded && _i6.listsEqual(other.who, who); + + @override + int get hashCode => who.hashCode; +} + +/// The member `who`se rank has been changed to the given `rank`. +class RankChanged extends Event { + const RankChanged({required this.who, required this.rank}); + + factory RankChanged._decode(_i1.Input input) { + return RankChanged(who: const _i1.U8ArrayCodec(32).decode(input), rank: _i1.U16Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// Rank + final int rank; + + @override + Map> toJson() => { + 'RankChanged': {'who': who.toList(), 'rank': rank}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U16Codec.codec.sizeHint(rank); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U16Codec.codec.encodeTo(rank, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is RankChanged && _i6.listsEqual(other.who, who) && other.rank == rank; + + @override + int get hashCode => Object.hash(who, rank); +} + +/// The member `who` of given `rank` has been removed from the collective. 
+class MemberRemoved extends Event { + const MemberRemoved({required this.who, required this.rank}); + + factory MemberRemoved._decode(_i1.Input input) { + return MemberRemoved(who: const _i1.U8ArrayCodec(32).decode(input), rank: _i1.U16Codec.codec.decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// Rank + final int rank; + + @override + Map> toJson() => { + 'MemberRemoved': {'who': who.toList(), 'rank': rank}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U16Codec.codec.sizeHint(rank); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U16Codec.codec.encodeTo(rank, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is MemberRemoved && _i6.listsEqual(other.who, who) && other.rank == rank; + + @override + int get hashCode => Object.hash(who, rank); +} + +/// The member `who` has voted for the `poll` with the given `vote` leading to an updated +/// `tally`. 
+class Voted extends Event { + const Voted({required this.who, required this.poll, required this.vote, required this.tally}); + + factory Voted._decode(_i1.Input input) { + return Voted( + who: const _i1.U8ArrayCodec(32).decode(input), + poll: _i1.U32Codec.codec.decode(input), + vote: _i4.VoteRecord.codec.decode(input), + tally: _i5.Tally.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// PollIndexOf + final int poll; + + /// VoteRecord + final _i4.VoteRecord vote; + + /// TallyOf + final _i5.Tally tally; + + @override + Map> toJson() => { + 'Voted': {'who': who.toList(), 'poll': poll, 'vote': vote.toJson(), 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U32Codec.codec.sizeHint(poll); + size = size + _i4.VoteRecord.codec.sizeHint(vote); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U32Codec.codec.encodeTo(poll, output); + _i4.VoteRecord.codec.encodeTo(vote, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Voted && + _i6.listsEqual(other.who, who) && + other.poll == poll && + other.vote == vote && + other.tally == tally; + + @override + int get hashCode => Object.hash(who, poll, vote, tally); +} + +/// The member `who` had their `AccountId` changed to `new_who`. 
+class MemberExchanged extends Event { + const MemberExchanged({required this.who, required this.newWho}); + + factory MemberExchanged._decode(_i1.Input input) { + return MemberExchanged( + who: const _i1.U8ArrayCodec(32).decode(input), + newWho: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::AccountId + final _i3.AccountId32 newWho; + + @override + Map>> toJson() => { + 'MemberExchanged': {'who': who.toList(), 'newWho': newWho.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + const _i3.AccountId32Codec().sizeHint(newWho); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + const _i1.U8ArrayCodec(32).encodeTo(newWho, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is MemberExchanged && _i6.listsEqual(other.who, who) && _i6.listsEqual(other.newWho, newWho); + + @override + int get hashCode => Object.hash(who, newWho); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/tally.dart b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/tally.dart new file mode 100644 index 00000000..dcacb46c --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/tally.dart @@ -0,0 +1,66 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +class Tally { + const Tally({required this.bareAyes, required this.ayes, required this.nays}); + + factory Tally.decode(_i1.Input input) { + return codec.decode(input); + } + + /// MemberIndex + final int bareAyes; + + /// Votes + final int ayes; + + /// Votes + final int nays; + + static const $TallyCodec codec = $TallyCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + 
} + + Map toJson() => {'bareAyes': bareAyes, 'ayes': ayes, 'nays': nays}; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Tally && other.bareAyes == bareAyes && other.ayes == ayes && other.nays == nays; + + @override + int get hashCode => Object.hash(bareAyes, ayes, nays); +} + +class $TallyCodec with _i1.Codec { + const $TallyCodec(); + + @override + void encodeTo(Tally obj, _i1.Output output) { + _i1.U32Codec.codec.encodeTo(obj.bareAyes, output); + _i1.U32Codec.codec.encodeTo(obj.ayes, output); + _i1.U32Codec.codec.encodeTo(obj.nays, output); + } + + @override + Tally decode(_i1.Input input) { + return Tally( + bareAyes: _i1.U32Codec.codec.decode(input), + ayes: _i1.U32Codec.codec.decode(input), + nays: _i1.U32Codec.codec.decode(input), + ); + } + + @override + int sizeHint(Tally obj) { + int size = 0; + size = size + _i1.U32Codec.codec.sizeHint(obj.bareAyes); + size = size + _i1.U32Codec.codec.sizeHint(obj.ayes); + size = size + _i1.U32Codec.codec.sizeHint(obj.nays); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/vote_record.dart b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/vote_record.dart new file mode 100644 index 00000000..8a05fdc3 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_ranked_collective/vote_record.dart @@ -0,0 +1,145 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +abstract class VoteRecord { + const VoteRecord(); + + factory VoteRecord.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $VoteRecordCodec codec = $VoteRecordCodec(); + + static const $VoteRecord values = $VoteRecord(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + 
+ Map toJson(); +} + +class $VoteRecord { + const $VoteRecord(); + + Aye aye(int value0) { + return Aye(value0); + } + + Nay nay(int value0) { + return Nay(value0); + } +} + +class $VoteRecordCodec with _i1.Codec { + const $VoteRecordCodec(); + + @override + VoteRecord decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Aye._decode(input); + case 1: + return Nay._decode(input); + default: + throw Exception('VoteRecord: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(VoteRecord value, _i1.Output output) { + switch (value.runtimeType) { + case Aye: + (value as Aye).encodeTo(output); + break; + case Nay: + (value as Nay).encodeTo(output); + break; + default: + throw Exception('VoteRecord: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(VoteRecord value) { + switch (value.runtimeType) { + case Aye: + return (value as Aye)._sizeHint(); + case Nay: + return (value as Nay)._sizeHint(); + default: + throw Exception('VoteRecord: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Aye extends VoteRecord { + const Aye(this.value0); + + factory Aye._decode(_i1.Input input) { + return Aye(_i1.U32Codec.codec.decode(input)); + } + + /// Votes + final int value0; + + @override + Map toJson() => {'Aye': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U32Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Aye && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Nay extends VoteRecord { + const Nay(this.value0); + + factory Nay._decode(_i1.Input input) { + return Nay(_i1.U32Codec.codec.decode(input)); + } + + /// Votes + final int value0; + + @override + Map toJson() 
=> {'Nay': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U32Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Nay && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_recovery/active_recovery.dart b/quantus_sdk/lib/generated/planck/types/pallet_recovery/active_recovery.dart new file mode 100644 index 00000000..2df5af34 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_recovery/active_recovery.dart @@ -0,0 +1,76 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../sp_core/crypto/account_id32.dart' as _i2; + +class ActiveRecovery { + const ActiveRecovery({required this.created, required this.deposit, required this.friends}); + + factory ActiveRecovery.decode(_i1.Input input) { + return codec.decode(input); + } + + /// BlockNumber + final int created; + + /// Balance + final BigInt deposit; + + /// Friends + final List<_i2.AccountId32> friends; + + static const $ActiveRecoveryCodec codec = $ActiveRecoveryCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'created': created, + 'deposit': deposit, + 'friends': friends.map((value) => value.toList()).toList(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ActiveRecovery && + other.created == created && + other.deposit == deposit && + _i4.listsEqual(other.friends, friends); + + @override + int get hashCode => Object.hash(created, deposit, friends); +} + +class $ActiveRecoveryCodec with _i1.Codec { + const $ActiveRecoveryCodec(); + + 
@override + void encodeTo(ActiveRecovery obj, _i1.Output output) { + _i1.U32Codec.codec.encodeTo(obj.created, output); + _i1.U128Codec.codec.encodeTo(obj.deposit, output); + const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).encodeTo(obj.friends, output); + } + + @override + ActiveRecovery decode(_i1.Input input) { + return ActiveRecovery( + created: _i1.U32Codec.codec.decode(input), + deposit: _i1.U128Codec.codec.decode(input), + friends: const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).decode(input), + ); + } + + @override + int sizeHint(ActiveRecovery obj) { + int size = 0; + size = size + _i1.U32Codec.codec.sizeHint(obj.created); + size = size + _i1.U128Codec.codec.sizeHint(obj.deposit); + size = size + const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).sizeHint(obj.friends); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_recovery/deposit_kind.dart b/quantus_sdk/lib/generated/planck/types/pallet_recovery/deposit_kind.dart new file mode 100644 index 00000000..8e0a6d10 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_recovery/deposit_kind.dart @@ -0,0 +1,135 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../sp_core/crypto/account_id32.dart' as _i3; + +abstract class DepositKind { + const DepositKind(); + + factory DepositKind.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $DepositKindCodec codec = $DepositKindCodec(); + + static const $DepositKind values = $DepositKind(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $DepositKind { + const $DepositKind(); + + RecoveryConfig recoveryConfig() 
{ + return RecoveryConfig(); + } + + ActiveRecoveryFor activeRecoveryFor(_i3.AccountId32 value0) { + return ActiveRecoveryFor(value0); + } +} + +class $DepositKindCodec with _i1.Codec { + const $DepositKindCodec(); + + @override + DepositKind decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return const RecoveryConfig(); + case 1: + return ActiveRecoveryFor._decode(input); + default: + throw Exception('DepositKind: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(DepositKind value, _i1.Output output) { + switch (value.runtimeType) { + case RecoveryConfig: + (value as RecoveryConfig).encodeTo(output); + break; + case ActiveRecoveryFor: + (value as ActiveRecoveryFor).encodeTo(output); + break; + default: + throw Exception('DepositKind: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(DepositKind value) { + switch (value.runtimeType) { + case RecoveryConfig: + return 1; + case ActiveRecoveryFor: + return (value as ActiveRecoveryFor)._sizeHint(); + default: + throw Exception('DepositKind: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class RecoveryConfig extends DepositKind { + const RecoveryConfig(); + + @override + Map toJson() => {'RecoveryConfig': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + } + + @override + bool operator ==(Object other) => other is RecoveryConfig; + + @override + int get hashCode => runtimeType.hashCode; +} + +class ActiveRecoveryFor extends DepositKind { + const ActiveRecoveryFor(this.value0); + + factory ActiveRecoveryFor._decode(_i1.Input input) { + return ActiveRecoveryFor(const _i1.U8ArrayCodec(32).decode(input)); + } + + /// ::AccountId + final _i3.AccountId32 value0; + + @override + Map> toJson() => {'ActiveRecoveryFor': value0.toList()}; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(value0); + return 
size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is ActiveRecoveryFor && _i4.listsEqual(other.value0, value0); + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_recovery/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_recovery/pallet/call.dart new file mode 100644 index 00000000..6aa962cb --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_recovery/pallet/call.dart @@ -0,0 +1,653 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i6; + +import '../../quantus_runtime/runtime_call.dart' as _i4; +import '../../sp_core/crypto/account_id32.dart' as _i5; +import '../../sp_runtime/multiaddress/multi_address.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. 
+abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $Call { + const $Call(); + + AsRecovered asRecovered({required _i3.MultiAddress account, required _i4.RuntimeCall call}) { + return AsRecovered(account: account, call: call); + } + + SetRecovered setRecovered({required _i3.MultiAddress lost, required _i3.MultiAddress rescuer}) { + return SetRecovered(lost: lost, rescuer: rescuer); + } + + CreateRecovery createRecovery({ + required List<_i5.AccountId32> friends, + required int threshold, + required int delayPeriod, + }) { + return CreateRecovery(friends: friends, threshold: threshold, delayPeriod: delayPeriod); + } + + InitiateRecovery initiateRecovery({required _i3.MultiAddress account}) { + return InitiateRecovery(account: account); + } + + VouchRecovery vouchRecovery({required _i3.MultiAddress lost, required _i3.MultiAddress rescuer}) { + return VouchRecovery(lost: lost, rescuer: rescuer); + } + + ClaimRecovery claimRecovery({required _i3.MultiAddress account}) { + return ClaimRecovery(account: account); + } + + CloseRecovery closeRecovery({required _i3.MultiAddress rescuer}) { + return CloseRecovery(rescuer: rescuer); + } + + RemoveRecovery removeRecovery() { + return RemoveRecovery(); + } + + CancelRecovered cancelRecovered({required _i3.MultiAddress account}) { + return CancelRecovered(account: account); + } + + PokeDeposit pokeDeposit({_i3.MultiAddress? 
maybeAccount}) { + return PokeDeposit(maybeAccount: maybeAccount); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return AsRecovered._decode(input); + case 1: + return SetRecovered._decode(input); + case 2: + return CreateRecovery._decode(input); + case 3: + return InitiateRecovery._decode(input); + case 4: + return VouchRecovery._decode(input); + case 5: + return ClaimRecovery._decode(input); + case 6: + return CloseRecovery._decode(input); + case 7: + return const RemoveRecovery(); + case 8: + return CancelRecovered._decode(input); + case 9: + return PokeDeposit._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case AsRecovered: + (value as AsRecovered).encodeTo(output); + break; + case SetRecovered: + (value as SetRecovered).encodeTo(output); + break; + case CreateRecovery: + (value as CreateRecovery).encodeTo(output); + break; + case InitiateRecovery: + (value as InitiateRecovery).encodeTo(output); + break; + case VouchRecovery: + (value as VouchRecovery).encodeTo(output); + break; + case ClaimRecovery: + (value as ClaimRecovery).encodeTo(output); + break; + case CloseRecovery: + (value as CloseRecovery).encodeTo(output); + break; + case RemoveRecovery: + (value as RemoveRecovery).encodeTo(output); + break; + case CancelRecovered: + (value as CancelRecovered).encodeTo(output); + break; + case PokeDeposit: + (value as PokeDeposit).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case AsRecovered: + return (value as AsRecovered)._sizeHint(); + case SetRecovered: + return (value as SetRecovered)._sizeHint(); + case CreateRecovery: + 
return (value as CreateRecovery)._sizeHint(); + case InitiateRecovery: + return (value as InitiateRecovery)._sizeHint(); + case VouchRecovery: + return (value as VouchRecovery)._sizeHint(); + case ClaimRecovery: + return (value as ClaimRecovery)._sizeHint(); + case CloseRecovery: + return (value as CloseRecovery)._sizeHint(); + case RemoveRecovery: + return 1; + case CancelRecovered: + return (value as CancelRecovered)._sizeHint(); + case PokeDeposit: + return (value as PokeDeposit)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Send a call through a recovered account. +/// +/// The dispatch origin for this call must be _Signed_ and registered to +/// be able to make calls on behalf of the recovered account. +/// +/// Parameters: +/// - `account`: The recovered account you want to make a call on-behalf-of. +/// - `call`: The call you want to make with the recovered account. +class AsRecovered extends Call { + const AsRecovered({required this.account, required this.call}); + + factory AsRecovered._decode(_i1.Input input) { + return AsRecovered(account: _i3.MultiAddress.codec.decode(input), call: _i4.RuntimeCall.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress account; + + /// Box<::RuntimeCall> + final _i4.RuntimeCall call; + + @override + Map>> toJson() => { + 'as_recovered': {'account': account.toJson(), 'call': call.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(account); + size = size + _i4.RuntimeCall.codec.sizeHint(call); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.MultiAddress.codec.encodeTo(account, output); + _i4.RuntimeCall.codec.encodeTo(call, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is AsRecovered && other.account == account && other.call == call; + + @override + int get hashCode => 
Object.hash(account, call); +} + +/// Allow ROOT to bypass the recovery process and set a rescuer account +/// for a lost account directly. +/// +/// The dispatch origin for this call must be _ROOT_. +/// +/// Parameters: +/// - `lost`: The "lost account" to be recovered. +/// - `rescuer`: The "rescuer account" which can call as the lost account. +class SetRecovered extends Call { + const SetRecovered({required this.lost, required this.rescuer}); + + factory SetRecovered._decode(_i1.Input input) { + return SetRecovered(lost: _i3.MultiAddress.codec.decode(input), rescuer: _i3.MultiAddress.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress lost; + + /// AccountIdLookupOf + final _i3.MultiAddress rescuer; + + @override + Map>> toJson() => { + 'set_recovered': {'lost': lost.toJson(), 'rescuer': rescuer.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(lost); + size = size + _i3.MultiAddress.codec.sizeHint(rescuer); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i3.MultiAddress.codec.encodeTo(lost, output); + _i3.MultiAddress.codec.encodeTo(rescuer, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is SetRecovered && other.lost == lost && other.rescuer == rescuer; + + @override + int get hashCode => Object.hash(lost, rescuer); +} + +/// Create a recovery configuration for your account. This makes your account recoverable. +/// +/// Payment: `ConfigDepositBase` + `FriendDepositFactor` * #_of_friends balance +/// will be reserved for storing the recovery configuration. This deposit is returned +/// in full when the user calls `remove_recovery`. +/// +/// The dispatch origin for this call must be _Signed_. +/// +/// Parameters: +/// - `friends`: A list of friends you trust to vouch for recovery attempts. Should be +/// ordered and contain no duplicate values. 
+/// - `threshold`: The number of friends that must vouch for a recovery attempt before the +/// account can be recovered. Should be less than or equal to the length of the list of +/// friends. +/// - `delay_period`: The number of blocks after a recovery attempt is initialized that +/// needs to pass before the account can be recovered. +class CreateRecovery extends Call { + const CreateRecovery({required this.friends, required this.threshold, required this.delayPeriod}); + + factory CreateRecovery._decode(_i1.Input input) { + return CreateRecovery( + friends: const _i1.SequenceCodec<_i5.AccountId32>(_i5.AccountId32Codec()).decode(input), + threshold: _i1.U16Codec.codec.decode(input), + delayPeriod: _i1.U32Codec.codec.decode(input), + ); + } + + /// Vec + final List<_i5.AccountId32> friends; + + /// u16 + final int threshold; + + /// BlockNumberFromProviderOf + final int delayPeriod; + + @override + Map> toJson() => { + 'create_recovery': { + 'friends': friends.map((value) => value.toList()).toList(), + 'threshold': threshold, + 'delayPeriod': delayPeriod, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.SequenceCodec<_i5.AccountId32>(_i5.AccountId32Codec()).sizeHint(friends); + size = size + _i1.U16Codec.codec.sizeHint(threshold); + size = size + _i1.U32Codec.codec.sizeHint(delayPeriod); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.SequenceCodec<_i5.AccountId32>(_i5.AccountId32Codec()).encodeTo(friends, output); + _i1.U16Codec.codec.encodeTo(threshold, output); + _i1.U32Codec.codec.encodeTo(delayPeriod, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is CreateRecovery && + _i6.listsEqual(other.friends, friends) && + other.threshold == threshold && + other.delayPeriod == delayPeriod; + + @override + int get hashCode => Object.hash(friends, threshold, delayPeriod); +} + +/// Initiate the process for recovering a recoverable 
account. +/// +/// Payment: `RecoveryDeposit` balance will be reserved for initiating the +/// recovery process. This deposit will always be repatriated to the account +/// trying to be recovered. See `close_recovery`. +/// +/// The dispatch origin for this call must be _Signed_. +/// +/// Parameters: +/// - `account`: The lost account that you want to recover. This account needs to be +/// recoverable (i.e. have a recovery configuration). +class InitiateRecovery extends Call { + const InitiateRecovery({required this.account}); + + factory InitiateRecovery._decode(_i1.Input input) { + return InitiateRecovery(account: _i3.MultiAddress.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress account; + + @override + Map>> toJson() => { + 'initiate_recovery': {'account': account.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(account); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i3.MultiAddress.codec.encodeTo(account, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is InitiateRecovery && other.account == account; + + @override + int get hashCode => account.hashCode; +} + +/// Allow a "friend" of a recoverable account to vouch for an active recovery +/// process for that account. +/// +/// The dispatch origin for this call must be _Signed_ and must be a "friend" +/// for the recoverable account. +/// +/// Parameters: +/// - `lost`: The lost account that you want to recover. +/// - `rescuer`: The account trying to rescue the lost account that you want to vouch for. +/// +/// The combination of these two parameters must point to an active recovery +/// process. 
+class VouchRecovery extends Call { + const VouchRecovery({required this.lost, required this.rescuer}); + + factory VouchRecovery._decode(_i1.Input input) { + return VouchRecovery(lost: _i3.MultiAddress.codec.decode(input), rescuer: _i3.MultiAddress.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress lost; + + /// AccountIdLookupOf + final _i3.MultiAddress rescuer; + + @override + Map>> toJson() => { + 'vouch_recovery': {'lost': lost.toJson(), 'rescuer': rescuer.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(lost); + size = size + _i3.MultiAddress.codec.sizeHint(rescuer); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i3.MultiAddress.codec.encodeTo(lost, output); + _i3.MultiAddress.codec.encodeTo(rescuer, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is VouchRecovery && other.lost == lost && other.rescuer == rescuer; + + @override + int get hashCode => Object.hash(lost, rescuer); +} + +/// Allow a successful rescuer to claim their recovered account. +/// +/// The dispatch origin for this call must be _Signed_ and must be a "rescuer" +/// who has successfully completed the account recovery process: collected +/// `threshold` or more vouches, waited `delay_period` blocks since initiation. +/// +/// Parameters: +/// - `account`: The lost account that you want to claim has been successfully recovered by +/// you. 
+class ClaimRecovery extends Call { + const ClaimRecovery({required this.account}); + + factory ClaimRecovery._decode(_i1.Input input) { + return ClaimRecovery(account: _i3.MultiAddress.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress account; + + @override + Map>> toJson() => { + 'claim_recovery': {'account': account.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(account); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i3.MultiAddress.codec.encodeTo(account, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ClaimRecovery && other.account == account; + + @override + int get hashCode => account.hashCode; +} + +/// As the controller of a recoverable account, close an active recovery +/// process for your account. +/// +/// Payment: By calling this function, the recoverable account will receive +/// the recovery deposit `RecoveryDeposit` placed by the rescuer. +/// +/// The dispatch origin for this call must be _Signed_ and must be a +/// recoverable account with an active recovery process for it. +/// +/// Parameters: +/// - `rescuer`: The account trying to rescue this recoverable account. 
+class CloseRecovery extends Call { + const CloseRecovery({required this.rescuer}); + + factory CloseRecovery._decode(_i1.Input input) { + return CloseRecovery(rescuer: _i3.MultiAddress.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress rescuer; + + @override + Map>> toJson() => { + 'close_recovery': {'rescuer': rescuer.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(rescuer); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i3.MultiAddress.codec.encodeTo(rescuer, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is CloseRecovery && other.rescuer == rescuer; + + @override + int get hashCode => rescuer.hashCode; +} + +/// Remove the recovery process for your account. Recovered accounts are still accessible. +/// +/// NOTE: The user must make sure to call `close_recovery` on all active +/// recovery attempts before calling this function else it will fail. +/// +/// Payment: By calling this function the recoverable account will unreserve +/// their recovery configuration deposit. +/// (`ConfigDepositBase` + `FriendDepositFactor` * #_of_friends) +/// +/// The dispatch origin for this call must be _Signed_ and must be a +/// recoverable account (i.e. has a recovery configuration). +class RemoveRecovery extends Call { + const RemoveRecovery(); + + @override + Map toJson() => {'remove_recovery': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + } + + @override + bool operator ==(Object other) => other is RemoveRecovery; + + @override + int get hashCode => runtimeType.hashCode; +} + +/// Cancel the ability to use `as_recovered` for `account`. +/// +/// The dispatch origin for this call must be _Signed_ and registered to +/// be able to make calls on behalf of the recovered account. 
+/// +/// Parameters: +/// - `account`: The recovered account you are able to call on-behalf-of. +class CancelRecovered extends Call { + const CancelRecovered({required this.account}); + + factory CancelRecovered._decode(_i1.Input input) { + return CancelRecovered(account: _i3.MultiAddress.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i3.MultiAddress account; + + @override + Map>> toJson() => { + 'cancel_recovered': {'account': account.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.MultiAddress.codec.sizeHint(account); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + _i3.MultiAddress.codec.encodeTo(account, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is CancelRecovered && other.account == account; + + @override + int get hashCode => account.hashCode; +} + +/// Poke deposits for recovery configurations and / or active recoveries. +/// +/// This can be used by accounts to possibly lower their locked amount. +/// +/// The dispatch origin for this call must be _Signed_. +/// +/// Parameters: +/// - `maybe_account`: Optional recoverable account for which you have an active recovery +/// and want to adjust the deposit for the active recovery. +/// +/// This function checks both recovery configuration deposit and active recovery deposits +/// of the caller: +/// - If the caller has created a recovery configuration, checks and adjusts its deposit +/// - If the caller has initiated any active recoveries, and provides the account in +/// `maybe_account`, checks and adjusts those deposits +/// +/// If any deposit is updated, the difference will be reserved/unreserved from the caller's +/// account. +/// +/// The transaction is made free if any deposit is updated and paid otherwise. +/// +/// Emits `DepositPoked` if any deposit is updated. +/// Multiple events may be emitted in case both types of deposits are updated. 
+class PokeDeposit extends Call { + const PokeDeposit({this.maybeAccount}); + + factory PokeDeposit._decode(_i1.Input input) { + return PokeDeposit(maybeAccount: const _i1.OptionCodec<_i3.MultiAddress>(_i3.MultiAddress.codec).decode(input)); + } + + /// Option> + final _i3.MultiAddress? maybeAccount; + + @override + Map?>> toJson() => { + 'poke_deposit': {'maybeAccount': maybeAccount?.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.OptionCodec<_i3.MultiAddress>(_i3.MultiAddress.codec).sizeHint(maybeAccount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + const _i1.OptionCodec<_i3.MultiAddress>(_i3.MultiAddress.codec).encodeTo(maybeAccount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is PokeDeposit && other.maybeAccount == maybeAccount; + + @override + int get hashCode => maybeAccount.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_recovery/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_recovery/pallet/error.dart new file mode 100644 index 00000000..7a117318 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_recovery/pallet/error.dart @@ -0,0 +1,123 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. 
+enum Error { + /// User is not allowed to make a call on behalf of this account + notAllowed('NotAllowed', 0), + + /// Threshold must be greater than zero + zeroThreshold('ZeroThreshold', 1), + + /// Friends list must be greater than zero and threshold + notEnoughFriends('NotEnoughFriends', 2), + + /// Friends list must be less than max friends + maxFriends('MaxFriends', 3), + + /// Friends list must be sorted and free of duplicates + notSorted('NotSorted', 4), + + /// This account is not set up for recovery + notRecoverable('NotRecoverable', 5), + + /// This account is already set up for recovery + alreadyRecoverable('AlreadyRecoverable', 6), + + /// A recovery process has already started for this account + alreadyStarted('AlreadyStarted', 7), + + /// A recovery process has not started for this rescuer + notStarted('NotStarted', 8), + + /// This account is not a friend who can vouch + notFriend('NotFriend', 9), + + /// The friend must wait until the delay period to vouch for this recovery + delayPeriod('DelayPeriod', 10), + + /// This user has already vouched for this recovery + alreadyVouched('AlreadyVouched', 11), + + /// The threshold for recovering this account has not been met + threshold('Threshold', 12), + + /// There are still active recovery attempts that need to be closed + stillActive('StillActive', 13), + + /// This account is already set up for recovery + alreadyProxy('AlreadyProxy', 14), + + /// Some internal state is broken. 
+ badState('BadState', 15); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.notAllowed; + case 1: + return Error.zeroThreshold; + case 2: + return Error.notEnoughFriends; + case 3: + return Error.maxFriends; + case 4: + return Error.notSorted; + case 5: + return Error.notRecoverable; + case 6: + return Error.alreadyRecoverable; + case 7: + return Error.alreadyStarted; + case 8: + return Error.notStarted; + case 9: + return Error.notFriend; + case 10: + return Error.delayPeriod; + case 11: + return Error.alreadyVouched; + case 12: + return Error.threshold; + case 13: + return Error.stillActive; + case 14: + return Error.alreadyProxy; + case 15: + return Error.badState; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_recovery/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_recovery/pallet/event.dart new file mode 100644 index 00000000..6544b0b1 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_recovery/pallet/event.dart @@ -0,0 +1,477 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import '../../sp_core/crypto/account_id32.dart' as _i3; +import '../deposit_kind.dart' as _i4; + 
+/// Events type. +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + RecoveryCreated recoveryCreated({required _i3.AccountId32 account}) { + return RecoveryCreated(account: account); + } + + RecoveryInitiated recoveryInitiated({required _i3.AccountId32 lostAccount, required _i3.AccountId32 rescuerAccount}) { + return RecoveryInitiated(lostAccount: lostAccount, rescuerAccount: rescuerAccount); + } + + RecoveryVouched recoveryVouched({ + required _i3.AccountId32 lostAccount, + required _i3.AccountId32 rescuerAccount, + required _i3.AccountId32 sender, + }) { + return RecoveryVouched(lostAccount: lostAccount, rescuerAccount: rescuerAccount, sender: sender); + } + + RecoveryClosed recoveryClosed({required _i3.AccountId32 lostAccount, required _i3.AccountId32 rescuerAccount}) { + return RecoveryClosed(lostAccount: lostAccount, rescuerAccount: rescuerAccount); + } + + AccountRecovered accountRecovered({required _i3.AccountId32 lostAccount, required _i3.AccountId32 rescuerAccount}) { + return AccountRecovered(lostAccount: lostAccount, rescuerAccount: rescuerAccount); + } + + RecoveryRemoved recoveryRemoved({required _i3.AccountId32 lostAccount}) { + return RecoveryRemoved(lostAccount: lostAccount); + } + + DepositPoked depositPoked({ + required _i3.AccountId32 who, + required _i4.DepositKind kind, + required BigInt oldDeposit, + required BigInt newDeposit, + }) { + return DepositPoked(who: who, kind: kind, oldDeposit: oldDeposit, newDeposit: newDeposit); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event 
decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return RecoveryCreated._decode(input); + case 1: + return RecoveryInitiated._decode(input); + case 2: + return RecoveryVouched._decode(input); + case 3: + return RecoveryClosed._decode(input); + case 4: + return AccountRecovered._decode(input); + case 5: + return RecoveryRemoved._decode(input); + case 6: + return DepositPoked._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case RecoveryCreated: + (value as RecoveryCreated).encodeTo(output); + break; + case RecoveryInitiated: + (value as RecoveryInitiated).encodeTo(output); + break; + case RecoveryVouched: + (value as RecoveryVouched).encodeTo(output); + break; + case RecoveryClosed: + (value as RecoveryClosed).encodeTo(output); + break; + case AccountRecovered: + (value as AccountRecovered).encodeTo(output); + break; + case RecoveryRemoved: + (value as RecoveryRemoved).encodeTo(output); + break; + case DepositPoked: + (value as DepositPoked).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case RecoveryCreated: + return (value as RecoveryCreated)._sizeHint(); + case RecoveryInitiated: + return (value as RecoveryInitiated)._sizeHint(); + case RecoveryVouched: + return (value as RecoveryVouched)._sizeHint(); + case RecoveryClosed: + return (value as RecoveryClosed)._sizeHint(); + case AccountRecovered: + return (value as AccountRecovered)._sizeHint(); + case RecoveryRemoved: + return (value as RecoveryRemoved)._sizeHint(); + case DepositPoked: + return (value as DepositPoked)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// A recovery process has 
been set up for an account. +class RecoveryCreated extends Event { + const RecoveryCreated({required this.account}); + + factory RecoveryCreated._decode(_i1.Input input) { + return RecoveryCreated(account: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 account; + + @override + Map>> toJson() => { + 'RecoveryCreated': {'account': account.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(account); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(account, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is RecoveryCreated && _i5.listsEqual(other.account, account); + + @override + int get hashCode => account.hashCode; +} + +/// A recovery process has been initiated for lost account by rescuer account. +class RecoveryInitiated extends Event { + const RecoveryInitiated({required this.lostAccount, required this.rescuerAccount}); + + factory RecoveryInitiated._decode(_i1.Input input) { + return RecoveryInitiated( + lostAccount: const _i1.U8ArrayCodec(32).decode(input), + rescuerAccount: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 lostAccount; + + /// T::AccountId + final _i3.AccountId32 rescuerAccount; + + @override + Map>> toJson() => { + 'RecoveryInitiated': {'lostAccount': lostAccount.toList(), 'rescuerAccount': rescuerAccount.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(lostAccount); + size = size + const _i3.AccountId32Codec().sizeHint(rescuerAccount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(lostAccount, output); + const _i1.U8ArrayCodec(32).encodeTo(rescuerAccount, output); + } + + @override + bool operator ==(Object other) 
=> + identical(this, other) || + other is RecoveryInitiated && + _i5.listsEqual(other.lostAccount, lostAccount) && + _i5.listsEqual(other.rescuerAccount, rescuerAccount); + + @override + int get hashCode => Object.hash(lostAccount, rescuerAccount); +} + +/// A recovery process for lost account by rescuer account has been vouched for by sender. +class RecoveryVouched extends Event { + const RecoveryVouched({required this.lostAccount, required this.rescuerAccount, required this.sender}); + + factory RecoveryVouched._decode(_i1.Input input) { + return RecoveryVouched( + lostAccount: const _i1.U8ArrayCodec(32).decode(input), + rescuerAccount: const _i1.U8ArrayCodec(32).decode(input), + sender: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 lostAccount; + + /// T::AccountId + final _i3.AccountId32 rescuerAccount; + + /// T::AccountId + final _i3.AccountId32 sender; + + @override + Map>> toJson() => { + 'RecoveryVouched': { + 'lostAccount': lostAccount.toList(), + 'rescuerAccount': rescuerAccount.toList(), + 'sender': sender.toList(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(lostAccount); + size = size + const _i3.AccountId32Codec().sizeHint(rescuerAccount); + size = size + const _i3.AccountId32Codec().sizeHint(sender); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(lostAccount, output); + const _i1.U8ArrayCodec(32).encodeTo(rescuerAccount, output); + const _i1.U8ArrayCodec(32).encodeTo(sender, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is RecoveryVouched && + _i5.listsEqual(other.lostAccount, lostAccount) && + _i5.listsEqual(other.rescuerAccount, rescuerAccount) && + _i5.listsEqual(other.sender, sender); + + @override + int get hashCode => Object.hash(lostAccount, rescuerAccount, sender); +} + +/// A recovery process 
for lost account by rescuer account has been closed. +class RecoveryClosed extends Event { + const RecoveryClosed({required this.lostAccount, required this.rescuerAccount}); + + factory RecoveryClosed._decode(_i1.Input input) { + return RecoveryClosed( + lostAccount: const _i1.U8ArrayCodec(32).decode(input), + rescuerAccount: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 lostAccount; + + /// T::AccountId + final _i3.AccountId32 rescuerAccount; + + @override + Map>> toJson() => { + 'RecoveryClosed': {'lostAccount': lostAccount.toList(), 'rescuerAccount': rescuerAccount.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(lostAccount); + size = size + const _i3.AccountId32Codec().sizeHint(rescuerAccount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(lostAccount, output); + const _i1.U8ArrayCodec(32).encodeTo(rescuerAccount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is RecoveryClosed && + _i5.listsEqual(other.lostAccount, lostAccount) && + _i5.listsEqual(other.rescuerAccount, rescuerAccount); + + @override + int get hashCode => Object.hash(lostAccount, rescuerAccount); +} + +/// Lost account has been successfully recovered by rescuer account. 
+class AccountRecovered extends Event { + const AccountRecovered({required this.lostAccount, required this.rescuerAccount}); + + factory AccountRecovered._decode(_i1.Input input) { + return AccountRecovered( + lostAccount: const _i1.U8ArrayCodec(32).decode(input), + rescuerAccount: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 lostAccount; + + /// T::AccountId + final _i3.AccountId32 rescuerAccount; + + @override + Map>> toJson() => { + 'AccountRecovered': {'lostAccount': lostAccount.toList(), 'rescuerAccount': rescuerAccount.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(lostAccount); + size = size + const _i3.AccountId32Codec().sizeHint(rescuerAccount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.U8ArrayCodec(32).encodeTo(lostAccount, output); + const _i1.U8ArrayCodec(32).encodeTo(rescuerAccount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is AccountRecovered && + _i5.listsEqual(other.lostAccount, lostAccount) && + _i5.listsEqual(other.rescuerAccount, rescuerAccount); + + @override + int get hashCode => Object.hash(lostAccount, rescuerAccount); +} + +/// A recovery process has been removed for an account. 
+class RecoveryRemoved extends Event { + const RecoveryRemoved({required this.lostAccount}); + + factory RecoveryRemoved._decode(_i1.Input input) { + return RecoveryRemoved(lostAccount: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 lostAccount; + + @override + Map>> toJson() => { + 'RecoveryRemoved': {'lostAccount': lostAccount.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(lostAccount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + const _i1.U8ArrayCodec(32).encodeTo(lostAccount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is RecoveryRemoved && _i5.listsEqual(other.lostAccount, lostAccount); + + @override + int get hashCode => lostAccount.hashCode; +} + +/// A deposit has been updated. +class DepositPoked extends Event { + const DepositPoked({required this.who, required this.kind, required this.oldDeposit, required this.newDeposit}); + + factory DepositPoked._decode(_i1.Input input) { + return DepositPoked( + who: const _i1.U8ArrayCodec(32).decode(input), + kind: _i4.DepositKind.codec.decode(input), + oldDeposit: _i1.U128Codec.codec.decode(input), + newDeposit: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// DepositKind + final _i4.DepositKind kind; + + /// BalanceOf + final BigInt oldDeposit; + + /// BalanceOf + final BigInt newDeposit; + + @override + Map> toJson() => { + 'DepositPoked': {'who': who.toList(), 'kind': kind.toJson(), 'oldDeposit': oldDeposit, 'newDeposit': newDeposit}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i4.DepositKind.codec.sizeHint(kind); + size = size + _i1.U128Codec.codec.sizeHint(oldDeposit); + size = size + _i1.U128Codec.codec.sizeHint(newDeposit); + return size; + } + + void encodeTo(_i1.Output 
output) { + _i1.U8Codec.codec.encodeTo(6, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i4.DepositKind.codec.encodeTo(kind, output); + _i1.U128Codec.codec.encodeTo(oldDeposit, output); + _i1.U128Codec.codec.encodeTo(newDeposit, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DepositPoked && + _i5.listsEqual(other.who, who) && + other.kind == kind && + other.oldDeposit == oldDeposit && + other.newDeposit == newDeposit; + + @override + int get hashCode => Object.hash(who, kind, oldDeposit, newDeposit); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_recovery/recovery_config.dart b/quantus_sdk/lib/generated/planck/types/pallet_recovery/recovery_config.dart new file mode 100644 index 00000000..2a6992a2 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_recovery/recovery_config.dart @@ -0,0 +1,89 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../sp_core/crypto/account_id32.dart' as _i2; + +class RecoveryConfig { + const RecoveryConfig({ + required this.delayPeriod, + required this.deposit, + required this.friends, + required this.threshold, + }); + + factory RecoveryConfig.decode(_i1.Input input) { + return codec.decode(input); + } + + /// BlockNumber + final int delayPeriod; + + /// Balance + final BigInt deposit; + + /// Friends + final List<_i2.AccountId32> friends; + + /// u16 + final int threshold; + + static const $RecoveryConfigCodec codec = $RecoveryConfigCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'delayPeriod': delayPeriod, + 'deposit': deposit, + 'friends': friends.map((value) => value.toList()).toList(), + 'threshold': threshold, + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is RecoveryConfig && + 
other.delayPeriod == delayPeriod && + other.deposit == deposit && + _i4.listsEqual(other.friends, friends) && + other.threshold == threshold; + + @override + int get hashCode => Object.hash(delayPeriod, deposit, friends, threshold); +} + +class $RecoveryConfigCodec with _i1.Codec { + const $RecoveryConfigCodec(); + + @override + void encodeTo(RecoveryConfig obj, _i1.Output output) { + _i1.U32Codec.codec.encodeTo(obj.delayPeriod, output); + _i1.U128Codec.codec.encodeTo(obj.deposit, output); + const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).encodeTo(obj.friends, output); + _i1.U16Codec.codec.encodeTo(obj.threshold, output); + } + + @override + RecoveryConfig decode(_i1.Input input) { + return RecoveryConfig( + delayPeriod: _i1.U32Codec.codec.decode(input), + deposit: _i1.U128Codec.codec.decode(input), + friends: const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).decode(input), + threshold: _i1.U16Codec.codec.decode(input), + ); + } + + @override + int sizeHint(RecoveryConfig obj) { + int size = 0; + size = size + _i1.U32Codec.codec.sizeHint(obj.delayPeriod); + size = size + _i1.U128Codec.codec.sizeHint(obj.deposit); + size = size + const _i1.SequenceCodec<_i2.AccountId32>(_i2.AccountId32Codec()).sizeHint(obj.friends); + size = size + _i1.U16Codec.codec.sizeHint(obj.threshold); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/call_1.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/call_1.dart new file mode 100644 index 00000000..bb798313 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/call_1.dart @@ -0,0 +1,562 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../frame_support/traits/preimages/bounded.dart' as _i4; +import '../../frame_support/traits/schedule/dispatch_time.dart' as _i5; +import 
'../../primitive_types/h256.dart' as _i6; +import '../../quantus_runtime/origin_caller.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. +abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + Submit submit({ + required _i3.OriginCaller proposalOrigin, + required _i4.Bounded proposal, + required _i5.DispatchTime enactmentMoment, + }) { + return Submit(proposalOrigin: proposalOrigin, proposal: proposal, enactmentMoment: enactmentMoment); + } + + PlaceDecisionDeposit placeDecisionDeposit({required int index}) { + return PlaceDecisionDeposit(index: index); + } + + RefundDecisionDeposit refundDecisionDeposit({required int index}) { + return RefundDecisionDeposit(index: index); + } + + Cancel cancel({required int index}) { + return Cancel(index: index); + } + + Kill kill({required int index}) { + return Kill(index: index); + } + + NudgeReferendum nudgeReferendum({required int index}) { + return NudgeReferendum(index: index); + } + + OneFewerDeciding oneFewerDeciding({required int track}) { + return OneFewerDeciding(track: track); + } + + RefundSubmissionDeposit refundSubmissionDeposit({required int index}) { + return RefundSubmissionDeposit(index: index); + } + + SetMetadata setMetadata({required int index, _i6.H256? 
maybeHash}) { + return SetMetadata(index: index, maybeHash: maybeHash); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Submit._decode(input); + case 1: + return PlaceDecisionDeposit._decode(input); + case 2: + return RefundDecisionDeposit._decode(input); + case 3: + return Cancel._decode(input); + case 4: + return Kill._decode(input); + case 5: + return NudgeReferendum._decode(input); + case 6: + return OneFewerDeciding._decode(input); + case 7: + return RefundSubmissionDeposit._decode(input); + case 8: + return SetMetadata._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case Submit: + (value as Submit).encodeTo(output); + break; + case PlaceDecisionDeposit: + (value as PlaceDecisionDeposit).encodeTo(output); + break; + case RefundDecisionDeposit: + (value as RefundDecisionDeposit).encodeTo(output); + break; + case Cancel: + (value as Cancel).encodeTo(output); + break; + case Kill: + (value as Kill).encodeTo(output); + break; + case NudgeReferendum: + (value as NudgeReferendum).encodeTo(output); + break; + case OneFewerDeciding: + (value as OneFewerDeciding).encodeTo(output); + break; + case RefundSubmissionDeposit: + (value as RefundSubmissionDeposit).encodeTo(output); + break; + case SetMetadata: + (value as SetMetadata).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case Submit: + return (value as Submit)._sizeHint(); + case PlaceDecisionDeposit: + return (value as PlaceDecisionDeposit)._sizeHint(); + case RefundDecisionDeposit: + return (value as RefundDecisionDeposit)._sizeHint(); + case Cancel: + return (value as 
Cancel)._sizeHint(); + case Kill: + return (value as Kill)._sizeHint(); + case NudgeReferendum: + return (value as NudgeReferendum)._sizeHint(); + case OneFewerDeciding: + return (value as OneFewerDeciding)._sizeHint(); + case RefundSubmissionDeposit: + return (value as RefundSubmissionDeposit)._sizeHint(); + case SetMetadata: + return (value as SetMetadata)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Propose a referendum on a privileged action. +/// +/// - `origin`: must be `SubmitOrigin` and the account must have `SubmissionDeposit` funds +/// available. +/// - `proposal_origin`: The origin from which the proposal should be executed. +/// - `proposal`: The proposal. +/// - `enactment_moment`: The moment that the proposal should be enacted. +/// +/// Emits `Submitted`. +class Submit extends Call { + const Submit({required this.proposalOrigin, required this.proposal, required this.enactmentMoment}); + + factory Submit._decode(_i1.Input input) { + return Submit( + proposalOrigin: _i3.OriginCaller.codec.decode(input), + proposal: _i4.Bounded.codec.decode(input), + enactmentMoment: _i5.DispatchTime.codec.decode(input), + ); + } + + /// Box> + final _i3.OriginCaller proposalOrigin; + + /// BoundedCallOf + final _i4.Bounded proposal; + + /// DispatchTime> + final _i5.DispatchTime enactmentMoment; + + @override + Map>> toJson() => { + 'submit': { + 'proposalOrigin': proposalOrigin.toJson(), + 'proposal': proposal.toJson(), + 'enactmentMoment': enactmentMoment.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.OriginCaller.codec.sizeHint(proposalOrigin); + size = size + _i4.Bounded.codec.sizeHint(proposal); + size = size + _i5.DispatchTime.codec.sizeHint(enactmentMoment); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.OriginCaller.codec.encodeTo(proposalOrigin, output); + _i4.Bounded.codec.encodeTo(proposal, 
output); + _i5.DispatchTime.codec.encodeTo(enactmentMoment, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Submit && + other.proposalOrigin == proposalOrigin && + other.proposal == proposal && + other.enactmentMoment == enactmentMoment; + + @override + int get hashCode => Object.hash(proposalOrigin, proposal, enactmentMoment); +} + +/// Post the Decision Deposit for a referendum. +/// +/// - `origin`: must be `Signed` and the account must have funds available for the +/// referendum's track's Decision Deposit. +/// - `index`: The index of the submitted referendum whose Decision Deposit is yet to be +/// posted. +/// +/// Emits `DecisionDepositPlaced`. +class PlaceDecisionDeposit extends Call { + const PlaceDecisionDeposit({required this.index}); + + factory PlaceDecisionDeposit._decode(_i1.Input input) { + return PlaceDecisionDeposit(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'place_decision_deposit': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is PlaceDecisionDeposit && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Refund the Decision Deposit for a closed referendum back to the depositor. +/// +/// - `origin`: must be `Signed` or `Root`. +/// - `index`: The index of a closed referendum whose Decision Deposit has not yet been +/// refunded. +/// +/// Emits `DecisionDepositRefunded`. 
+class RefundDecisionDeposit extends Call { + const RefundDecisionDeposit({required this.index}); + + factory RefundDecisionDeposit._decode(_i1.Input input) { + return RefundDecisionDeposit(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'refund_decision_deposit': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is RefundDecisionDeposit && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Cancel an ongoing referendum. +/// +/// - `origin`: must be the `CancelOrigin`. +/// - `index`: The index of the referendum to be cancelled. +/// +/// Emits `Cancelled`. +class Cancel extends Call { + const Cancel({required this.index}); + + factory Cancel._decode(_i1.Input input) { + return Cancel(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'cancel': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Cancel && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Cancel an ongoing referendum and slash the deposits. +/// +/// - `origin`: must be the `KillOrigin`. +/// - `index`: The index of the referendum to be cancelled. +/// +/// Emits `Killed` and `DepositSlashed`. 
+class Kill extends Call { + const Kill({required this.index}); + + factory Kill._decode(_i1.Input input) { + return Kill(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'kill': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Kill && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Advance a referendum onto its next logical state. Only used internally. +/// +/// - `origin`: must be `Root`. +/// - `index`: the referendum to be advanced. +class NudgeReferendum extends Call { + const NudgeReferendum({required this.index}); + + factory NudgeReferendum._decode(_i1.Input input) { + return NudgeReferendum(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'nudge_referendum': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is NudgeReferendum && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Advance a track onto its next logical state. Only used internally. +/// +/// - `origin`: must be `Root`. +/// - `track`: the track to be advanced. +/// +/// Action item for when there is now one fewer referendum in the deciding phase and the +/// `DecidingCount` is not yet updated. 
This means that we should either: +/// - begin deciding another referendum (and leave `DecidingCount` alone); or +/// - decrement `DecidingCount`. +class OneFewerDeciding extends Call { + const OneFewerDeciding({required this.track}); + + factory OneFewerDeciding._decode(_i1.Input input) { + return OneFewerDeciding(track: _i1.U16Codec.codec.decode(input)); + } + + /// TrackIdOf + final int track; + + @override + Map> toJson() => { + 'one_fewer_deciding': {'track': track}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U16Codec.codec.sizeHint(track); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i1.U16Codec.codec.encodeTo(track, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is OneFewerDeciding && other.track == track; + + @override + int get hashCode => track.hashCode; +} + +/// Refund the Submission Deposit for a closed referendum back to the depositor. +/// +/// - `origin`: must be `Signed` or `Root`. +/// - `index`: The index of a closed referendum whose Submission Deposit has not yet been +/// refunded. +/// +/// Emits `SubmissionDepositRefunded`. 
+class RefundSubmissionDeposit extends Call { + const RefundSubmissionDeposit({required this.index}); + + factory RefundSubmissionDeposit._decode(_i1.Input input) { + return RefundSubmissionDeposit(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'refund_submission_deposit': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is RefundSubmissionDeposit && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Set or clear metadata of a referendum. +/// +/// Parameters: +/// - `origin`: Must be `Signed` by a creator of a referendum or by anyone to clear a +/// metadata of a finished referendum. +/// - `index`: The index of a referendum to set or clear metadata for. +/// - `maybe_hash`: The hash of an on-chain stored preimage. `None` to clear a metadata. +class SetMetadata extends Call { + const SetMetadata({required this.index, this.maybeHash}); + + factory SetMetadata._decode(_i1.Input input) { + return SetMetadata( + index: _i1.U32Codec.codec.decode(input), + maybeHash: const _i1.OptionCodec<_i6.H256>(_i6.H256Codec()).decode(input), + ); + } + + /// ReferendumIndex + final int index; + + /// Option + final _i6.H256? 
maybeHash; + + @override + Map> toJson() => { + 'set_metadata': {'index': index, 'maybeHash': maybeHash?.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i1.OptionCodec<_i6.H256>(_i6.H256Codec()).sizeHint(maybeHash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.OptionCodec<_i6.H256>(_i6.H256Codec()).encodeTo(maybeHash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is SetMetadata && other.index == index && other.maybeHash == maybeHash; + + @override + int get hashCode => Object.hash(index, maybeHash); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/call_2.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/call_2.dart new file mode 100644 index 00000000..bb798313 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/call_2.dart @@ -0,0 +1,562 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../frame_support/traits/preimages/bounded.dart' as _i4; +import '../../frame_support/traits/schedule/dispatch_time.dart' as _i5; +import '../../primitive_types/h256.dart' as _i6; +import '../../quantus_runtime/origin_caller.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. 
+abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + Submit submit({ + required _i3.OriginCaller proposalOrigin, + required _i4.Bounded proposal, + required _i5.DispatchTime enactmentMoment, + }) { + return Submit(proposalOrigin: proposalOrigin, proposal: proposal, enactmentMoment: enactmentMoment); + } + + PlaceDecisionDeposit placeDecisionDeposit({required int index}) { + return PlaceDecisionDeposit(index: index); + } + + RefundDecisionDeposit refundDecisionDeposit({required int index}) { + return RefundDecisionDeposit(index: index); + } + + Cancel cancel({required int index}) { + return Cancel(index: index); + } + + Kill kill({required int index}) { + return Kill(index: index); + } + + NudgeReferendum nudgeReferendum({required int index}) { + return NudgeReferendum(index: index); + } + + OneFewerDeciding oneFewerDeciding({required int track}) { + return OneFewerDeciding(track: track); + } + + RefundSubmissionDeposit refundSubmissionDeposit({required int index}) { + return RefundSubmissionDeposit(index: index); + } + + SetMetadata setMetadata({required int index, _i6.H256? 
maybeHash}) { + return SetMetadata(index: index, maybeHash: maybeHash); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Submit._decode(input); + case 1: + return PlaceDecisionDeposit._decode(input); + case 2: + return RefundDecisionDeposit._decode(input); + case 3: + return Cancel._decode(input); + case 4: + return Kill._decode(input); + case 5: + return NudgeReferendum._decode(input); + case 6: + return OneFewerDeciding._decode(input); + case 7: + return RefundSubmissionDeposit._decode(input); + case 8: + return SetMetadata._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case Submit: + (value as Submit).encodeTo(output); + break; + case PlaceDecisionDeposit: + (value as PlaceDecisionDeposit).encodeTo(output); + break; + case RefundDecisionDeposit: + (value as RefundDecisionDeposit).encodeTo(output); + break; + case Cancel: + (value as Cancel).encodeTo(output); + break; + case Kill: + (value as Kill).encodeTo(output); + break; + case NudgeReferendum: + (value as NudgeReferendum).encodeTo(output); + break; + case OneFewerDeciding: + (value as OneFewerDeciding).encodeTo(output); + break; + case RefundSubmissionDeposit: + (value as RefundSubmissionDeposit).encodeTo(output); + break; + case SetMetadata: + (value as SetMetadata).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case Submit: + return (value as Submit)._sizeHint(); + case PlaceDecisionDeposit: + return (value as PlaceDecisionDeposit)._sizeHint(); + case RefundDecisionDeposit: + return (value as RefundDecisionDeposit)._sizeHint(); + case Cancel: + return (value as 
Cancel)._sizeHint(); + case Kill: + return (value as Kill)._sizeHint(); + case NudgeReferendum: + return (value as NudgeReferendum)._sizeHint(); + case OneFewerDeciding: + return (value as OneFewerDeciding)._sizeHint(); + case RefundSubmissionDeposit: + return (value as RefundSubmissionDeposit)._sizeHint(); + case SetMetadata: + return (value as SetMetadata)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Propose a referendum on a privileged action. +/// +/// - `origin`: must be `SubmitOrigin` and the account must have `SubmissionDeposit` funds +/// available. +/// - `proposal_origin`: The origin from which the proposal should be executed. +/// - `proposal`: The proposal. +/// - `enactment_moment`: The moment that the proposal should be enacted. +/// +/// Emits `Submitted`. +class Submit extends Call { + const Submit({required this.proposalOrigin, required this.proposal, required this.enactmentMoment}); + + factory Submit._decode(_i1.Input input) { + return Submit( + proposalOrigin: _i3.OriginCaller.codec.decode(input), + proposal: _i4.Bounded.codec.decode(input), + enactmentMoment: _i5.DispatchTime.codec.decode(input), + ); + } + + /// Box> + final _i3.OriginCaller proposalOrigin; + + /// BoundedCallOf + final _i4.Bounded proposal; + + /// DispatchTime> + final _i5.DispatchTime enactmentMoment; + + @override + Map>> toJson() => { + 'submit': { + 'proposalOrigin': proposalOrigin.toJson(), + 'proposal': proposal.toJson(), + 'enactmentMoment': enactmentMoment.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.OriginCaller.codec.sizeHint(proposalOrigin); + size = size + _i4.Bounded.codec.sizeHint(proposal); + size = size + _i5.DispatchTime.codec.sizeHint(enactmentMoment); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.OriginCaller.codec.encodeTo(proposalOrigin, output); + _i4.Bounded.codec.encodeTo(proposal, 
output); + _i5.DispatchTime.codec.encodeTo(enactmentMoment, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Submit && + other.proposalOrigin == proposalOrigin && + other.proposal == proposal && + other.enactmentMoment == enactmentMoment; + + @override + int get hashCode => Object.hash(proposalOrigin, proposal, enactmentMoment); +} + +/// Post the Decision Deposit for a referendum. +/// +/// - `origin`: must be `Signed` and the account must have funds available for the +/// referendum's track's Decision Deposit. +/// - `index`: The index of the submitted referendum whose Decision Deposit is yet to be +/// posted. +/// +/// Emits `DecisionDepositPlaced`. +class PlaceDecisionDeposit extends Call { + const PlaceDecisionDeposit({required this.index}); + + factory PlaceDecisionDeposit._decode(_i1.Input input) { + return PlaceDecisionDeposit(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'place_decision_deposit': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is PlaceDecisionDeposit && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Refund the Decision Deposit for a closed referendum back to the depositor. +/// +/// - `origin`: must be `Signed` or `Root`. +/// - `index`: The index of a closed referendum whose Decision Deposit has not yet been +/// refunded. +/// +/// Emits `DecisionDepositRefunded`. 
+class RefundDecisionDeposit extends Call { + const RefundDecisionDeposit({required this.index}); + + factory RefundDecisionDeposit._decode(_i1.Input input) { + return RefundDecisionDeposit(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'refund_decision_deposit': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is RefundDecisionDeposit && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Cancel an ongoing referendum. +/// +/// - `origin`: must be the `CancelOrigin`. +/// - `index`: The index of the referendum to be cancelled. +/// +/// Emits `Cancelled`. +class Cancel extends Call { + const Cancel({required this.index}); + + factory Cancel._decode(_i1.Input input) { + return Cancel(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'cancel': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Cancel && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Cancel an ongoing referendum and slash the deposits. +/// +/// - `origin`: must be the `KillOrigin`. +/// - `index`: The index of the referendum to be cancelled. +/// +/// Emits `Killed` and `DepositSlashed`. 
+class Kill extends Call { + const Kill({required this.index}); + + factory Kill._decode(_i1.Input input) { + return Kill(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'kill': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Kill && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Advance a referendum onto its next logical state. Only used internally. +/// +/// - `origin`: must be `Root`. +/// - `index`: the referendum to be advanced. +class NudgeReferendum extends Call { + const NudgeReferendum({required this.index}); + + factory NudgeReferendum._decode(_i1.Input input) { + return NudgeReferendum(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'nudge_referendum': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is NudgeReferendum && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Advance a track onto its next logical state. Only used internally. +/// +/// - `origin`: must be `Root`. +/// - `track`: the track to be advanced. +/// +/// Action item for when there is now one fewer referendum in the deciding phase and the +/// `DecidingCount` is not yet updated. 
This means that we should either: +/// - begin deciding another referendum (and leave `DecidingCount` alone); or +/// - decrement `DecidingCount`. +class OneFewerDeciding extends Call { + const OneFewerDeciding({required this.track}); + + factory OneFewerDeciding._decode(_i1.Input input) { + return OneFewerDeciding(track: _i1.U16Codec.codec.decode(input)); + } + + /// TrackIdOf + final int track; + + @override + Map> toJson() => { + 'one_fewer_deciding': {'track': track}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U16Codec.codec.sizeHint(track); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i1.U16Codec.codec.encodeTo(track, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is OneFewerDeciding && other.track == track; + + @override + int get hashCode => track.hashCode; +} + +/// Refund the Submission Deposit for a closed referendum back to the depositor. +/// +/// - `origin`: must be `Signed` or `Root`. +/// - `index`: The index of a closed referendum whose Submission Deposit has not yet been +/// refunded. +/// +/// Emits `SubmissionDepositRefunded`. 
+class RefundSubmissionDeposit extends Call { + const RefundSubmissionDeposit({required this.index}); + + factory RefundSubmissionDeposit._decode(_i1.Input input) { + return RefundSubmissionDeposit(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + final int index; + + @override + Map> toJson() => { + 'refund_submission_deposit': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is RefundSubmissionDeposit && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// Set or clear metadata of a referendum. +/// +/// Parameters: +/// - `origin`: Must be `Signed` by a creator of a referendum or by anyone to clear a +/// metadata of a finished referendum. +/// - `index`: The index of a referendum to set or clear metadata for. +/// - `maybe_hash`: The hash of an on-chain stored preimage. `None` to clear a metadata. +class SetMetadata extends Call { + const SetMetadata({required this.index, this.maybeHash}); + + factory SetMetadata._decode(_i1.Input input) { + return SetMetadata( + index: _i1.U32Codec.codec.decode(input), + maybeHash: const _i1.OptionCodec<_i6.H256>(_i6.H256Codec()).decode(input), + ); + } + + /// ReferendumIndex + final int index; + + /// Option + final _i6.H256? 
maybeHash; + + @override + Map> toJson() => { + 'set_metadata': {'index': index, 'maybeHash': maybeHash?.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i1.OptionCodec<_i6.H256>(_i6.H256Codec()).sizeHint(maybeHash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.OptionCodec<_i6.H256>(_i6.H256Codec()).encodeTo(maybeHash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is SetMetadata && other.index == index && other.maybeHash == maybeHash; + + @override + int get hashCode => Object.hash(index, maybeHash); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/error_1.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/error_1.dart new file mode 100644 index 00000000..79d21e4a --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/error_1.dart @@ -0,0 +1,113 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + /// Referendum is not ongoing. + notOngoing('NotOngoing', 0), + + /// Referendum's decision deposit is already paid. + hasDeposit('HasDeposit', 1), + + /// The track identifier given was invalid. + badTrack('BadTrack', 2), + + /// There are already a full complement of referenda in progress for this track. + full('Full', 3), + + /// The queue of the track is empty. + queueEmpty('QueueEmpty', 4), + + /// The referendum index provided is invalid in this context. + badReferendum('BadReferendum', 5), + + /// There was nothing to do in the advancement. + nothingToDo('NothingToDo', 6), + + /// No track exists for the proposal origin. 
+ noTrack('NoTrack', 7), + + /// Any deposit cannot be refunded until after the decision is over. + unfinished('Unfinished', 8), + + /// The deposit refunder is not the depositor. + noPermission('NoPermission', 9), + + /// The deposit cannot be refunded since none was made. + noDeposit('NoDeposit', 10), + + /// The referendum status is invalid for this operation. + badStatus('BadStatus', 11), + + /// The preimage does not exist. + preimageNotExist('PreimageNotExist', 12), + + /// The preimage is stored with a different length than the one provided. + preimageStoredWithDifferentLength('PreimageStoredWithDifferentLength', 13); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.notOngoing; + case 1: + return Error.hasDeposit; + case 2: + return Error.badTrack; + case 3: + return Error.full; + case 4: + return Error.queueEmpty; + case 5: + return Error.badReferendum; + case 6: + return Error.nothingToDo; + case 7: + return Error.noTrack; + case 8: + return Error.unfinished; + case 9: + return Error.noPermission; + case 10: + return Error.noDeposit; + case 11: + return Error.badStatus; + case 12: + return Error.preimageNotExist; + case 13: + return Error.preimageStoredWithDifferentLength; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/error_2.dart 
b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/error_2.dart new file mode 100644 index 00000000..79d21e4a --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/error_2.dart @@ -0,0 +1,113 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + /// Referendum is not ongoing. + notOngoing('NotOngoing', 0), + + /// Referendum's decision deposit is already paid. + hasDeposit('HasDeposit', 1), + + /// The track identifier given was invalid. + badTrack('BadTrack', 2), + + /// There are already a full complement of referenda in progress for this track. + full('Full', 3), + + /// The queue of the track is empty. + queueEmpty('QueueEmpty', 4), + + /// The referendum index provided is invalid in this context. + badReferendum('BadReferendum', 5), + + /// There was nothing to do in the advancement. + nothingToDo('NothingToDo', 6), + + /// No track exists for the proposal origin. + noTrack('NoTrack', 7), + + /// Any deposit cannot be refunded until after the decision is over. + unfinished('Unfinished', 8), + + /// The deposit refunder is not the depositor. + noPermission('NoPermission', 9), + + /// The deposit cannot be refunded since none was made. + noDeposit('NoDeposit', 10), + + /// The referendum status is invalid for this operation. + badStatus('BadStatus', 11), + + /// The preimage does not exist. + preimageNotExist('PreimageNotExist', 12), + + /// The preimage is stored with a different length than the one provided. 
+ preimageStoredWithDifferentLength('PreimageStoredWithDifferentLength', 13); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.notOngoing; + case 1: + return Error.hasDeposit; + case 2: + return Error.badTrack; + case 3: + return Error.full; + case 4: + return Error.queueEmpty; + case 5: + return Error.badReferendum; + case 6: + return Error.nothingToDo; + case 7: + return Error.noTrack; + case 8: + return Error.unfinished; + case 9: + return Error.noPermission; + case 10: + return Error.noDeposit; + case 11: + return Error.badStatus; + case 12: + return Error.preimageNotExist; + case 13: + return Error.preimageStoredWithDifferentLength; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/event_1.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/event_1.dart new file mode 100644 index 00000000..1be40aae --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/event_1.dart @@ -0,0 +1,985 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i7; + +import '../../frame_support/traits/preimages/bounded.dart' as _i3; +import 
'../../pallet_conviction_voting/types/tally.dart' as _i5; +import '../../primitive_types/h256.dart' as _i6; +import '../../sp_core/crypto/account_id32.dart' as _i4; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + Submitted submitted({required int index, required int track, required _i3.Bounded proposal}) { + return Submitted(index: index, track: track, proposal: proposal); + } + + DecisionDepositPlaced decisionDepositPlaced({ + required int index, + required _i4.AccountId32 who, + required BigInt amount, + }) { + return DecisionDepositPlaced(index: index, who: who, amount: amount); + } + + DecisionDepositRefunded decisionDepositRefunded({ + required int index, + required _i4.AccountId32 who, + required BigInt amount, + }) { + return DecisionDepositRefunded(index: index, who: who, amount: amount); + } + + DepositSlashed depositSlashed({required _i4.AccountId32 who, required BigInt amount}) { + return DepositSlashed(who: who, amount: amount); + } + + DecisionStarted decisionStarted({ + required int index, + required int track, + required _i3.Bounded proposal, + required _i5.Tally tally, + }) { + return DecisionStarted(index: index, track: track, proposal: proposal, tally: tally); + } + + ConfirmStarted confirmStarted({required int index}) { + return ConfirmStarted(index: index); + } + + ConfirmAborted confirmAborted({required int index}) { + return ConfirmAborted(index: index); + } + + Confirmed confirmed({required int index, required _i5.Tally tally}) { + return Confirmed(index: index, tally: tally); + } 
+ + Approved approved({required int index}) { + return Approved(index: index); + } + + Rejected rejected({required int index, required _i5.Tally tally}) { + return Rejected(index: index, tally: tally); + } + + TimedOut timedOut({required int index, required _i5.Tally tally}) { + return TimedOut(index: index, tally: tally); + } + + Cancelled cancelled({required int index, required _i5.Tally tally}) { + return Cancelled(index: index, tally: tally); + } + + Killed killed({required int index, required _i5.Tally tally}) { + return Killed(index: index, tally: tally); + } + + SubmissionDepositRefunded submissionDepositRefunded({ + required int index, + required _i4.AccountId32 who, + required BigInt amount, + }) { + return SubmissionDepositRefunded(index: index, who: who, amount: amount); + } + + MetadataSet metadataSet({required int index, required _i6.H256 hash}) { + return MetadataSet(index: index, hash: hash); + } + + MetadataCleared metadataCleared({required int index, required _i6.H256 hash}) { + return MetadataCleared(index: index, hash: hash); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Submitted._decode(input); + case 1: + return DecisionDepositPlaced._decode(input); + case 2: + return DecisionDepositRefunded._decode(input); + case 3: + return DepositSlashed._decode(input); + case 4: + return DecisionStarted._decode(input); + case 5: + return ConfirmStarted._decode(input); + case 6: + return ConfirmAborted._decode(input); + case 7: + return Confirmed._decode(input); + case 8: + return Approved._decode(input); + case 9: + return Rejected._decode(input); + case 10: + return TimedOut._decode(input); + case 11: + return Cancelled._decode(input); + case 12: + return Killed._decode(input); + case 13: + return SubmissionDepositRefunded._decode(input); + case 14: + return MetadataSet._decode(input); + case 15: + 
return MetadataCleared._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case Submitted: + (value as Submitted).encodeTo(output); + break; + case DecisionDepositPlaced: + (value as DecisionDepositPlaced).encodeTo(output); + break; + case DecisionDepositRefunded: + (value as DecisionDepositRefunded).encodeTo(output); + break; + case DepositSlashed: + (value as DepositSlashed).encodeTo(output); + break; + case DecisionStarted: + (value as DecisionStarted).encodeTo(output); + break; + case ConfirmStarted: + (value as ConfirmStarted).encodeTo(output); + break; + case ConfirmAborted: + (value as ConfirmAborted).encodeTo(output); + break; + case Confirmed: + (value as Confirmed).encodeTo(output); + break; + case Approved: + (value as Approved).encodeTo(output); + break; + case Rejected: + (value as Rejected).encodeTo(output); + break; + case TimedOut: + (value as TimedOut).encodeTo(output); + break; + case Cancelled: + (value as Cancelled).encodeTo(output); + break; + case Killed: + (value as Killed).encodeTo(output); + break; + case SubmissionDepositRefunded: + (value as SubmissionDepositRefunded).encodeTo(output); + break; + case MetadataSet: + (value as MetadataSet).encodeTo(output); + break; + case MetadataCleared: + (value as MetadataCleared).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case Submitted: + return (value as Submitted)._sizeHint(); + case DecisionDepositPlaced: + return (value as DecisionDepositPlaced)._sizeHint(); + case DecisionDepositRefunded: + return (value as DecisionDepositRefunded)._sizeHint(); + case DepositSlashed: + return (value as DepositSlashed)._sizeHint(); + case DecisionStarted: + return (value as DecisionStarted)._sizeHint(); + case 
ConfirmStarted: + return (value as ConfirmStarted)._sizeHint(); + case ConfirmAborted: + return (value as ConfirmAborted)._sizeHint(); + case Confirmed: + return (value as Confirmed)._sizeHint(); + case Approved: + return (value as Approved)._sizeHint(); + case Rejected: + return (value as Rejected)._sizeHint(); + case TimedOut: + return (value as TimedOut)._sizeHint(); + case Cancelled: + return (value as Cancelled)._sizeHint(); + case Killed: + return (value as Killed)._sizeHint(); + case SubmissionDepositRefunded: + return (value as SubmissionDepositRefunded)._sizeHint(); + case MetadataSet: + return (value as MetadataSet)._sizeHint(); + case MetadataCleared: + return (value as MetadataCleared)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// A referendum has been submitted. +class Submitted extends Event { + const Submitted({required this.index, required this.track, required this.proposal}); + + factory Submitted._decode(_i1.Input input) { + return Submitted( + index: _i1.U32Codec.codec.decode(input), + track: _i1.U16Codec.codec.decode(input), + proposal: _i3.Bounded.codec.decode(input), + ); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// TrackIdOf + /// The track (and by extension proposal dispatch origin) of this referendum. + final int track; + + /// BoundedCallOf + /// The proposal for the referendum. 
+ final _i3.Bounded proposal; + + @override + Map> toJson() => { + 'Submitted': {'index': index, 'track': track, 'proposal': proposal.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i1.U16Codec.codec.sizeHint(track); + size = size + _i3.Bounded.codec.sizeHint(proposal); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i1.U16Codec.codec.encodeTo(track, output); + _i3.Bounded.codec.encodeTo(proposal, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Submitted && other.index == index && other.track == track && other.proposal == proposal; + + @override + int get hashCode => Object.hash(index, track, proposal); +} + +/// The decision deposit has been placed. +class DecisionDepositPlaced extends Event { + const DecisionDepositPlaced({required this.index, required this.who, required this.amount}); + + factory DecisionDepositPlaced._decode(_i1.Input input) { + return DecisionDepositPlaced( + index: _i1.U32Codec.codec.decode(input), + who: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::AccountId + /// The account who placed the deposit. + final _i4.AccountId32 who; + + /// BalanceOf + /// The amount placed by the account. 
+ final BigInt amount; + + @override + Map> toJson() => { + 'DecisionDepositPlaced': {'index': index, 'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i4.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DecisionDepositPlaced && + other.index == index && + _i7.listsEqual(other.who, who) && + other.amount == amount; + + @override + int get hashCode => Object.hash(index, who, amount); +} + +/// The decision deposit has been refunded. +class DecisionDepositRefunded extends Event { + const DecisionDepositRefunded({required this.index, required this.who, required this.amount}); + + factory DecisionDepositRefunded._decode(_i1.Input input) { + return DecisionDepositRefunded( + index: _i1.U32Codec.codec.decode(input), + who: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::AccountId + /// The account who placed the deposit. + final _i4.AccountId32 who; + + /// BalanceOf + /// The amount placed by the account. 
+ final BigInt amount; + + @override + Map> toJson() => { + 'DecisionDepositRefunded': {'index': index, 'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i4.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DecisionDepositRefunded && + other.index == index && + _i7.listsEqual(other.who, who) && + other.amount == amount; + + @override + int get hashCode => Object.hash(index, who, amount); +} + +/// A deposit has been slashed. +class DepositSlashed extends Event { + const DepositSlashed({required this.who, required this.amount}); + + factory DepositSlashed._decode(_i1.Input input) { + return DepositSlashed(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + /// The account who placed the deposit. + final _i4.AccountId32 who; + + /// BalanceOf + /// The amount placed by the account. 
+ final BigInt amount; + + @override + Map> toJson() => { + 'DepositSlashed': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i4.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is DepositSlashed && _i7.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// A referendum has moved into the deciding phase. +class DecisionStarted extends Event { + const DecisionStarted({required this.index, required this.track, required this.proposal, required this.tally}); + + factory DecisionStarted._decode(_i1.Input input) { + return DecisionStarted( + index: _i1.U32Codec.codec.decode(input), + track: _i1.U16Codec.codec.decode(input), + proposal: _i3.Bounded.codec.decode(input), + tally: _i5.Tally.codec.decode(input), + ); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// TrackIdOf + /// The track (and by extension proposal dispatch origin) of this referendum. + final int track; + + /// BoundedCallOf + /// The proposal for the referendum. + final _i3.Bounded proposal; + + /// T::Tally + /// The current tally of votes in this referendum. 
+ final _i5.Tally tally; + + @override + Map> toJson() => { + 'DecisionStarted': {'index': index, 'track': track, 'proposal': proposal.toJson(), 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i1.U16Codec.codec.sizeHint(track); + size = size + _i3.Bounded.codec.sizeHint(proposal); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i1.U16Codec.codec.encodeTo(track, output); + _i3.Bounded.codec.encodeTo(proposal, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DecisionStarted && + other.index == index && + other.track == track && + other.proposal == proposal && + other.tally == tally; + + @override + int get hashCode => Object.hash(index, track, proposal, tally); +} + +class ConfirmStarted extends Event { + const ConfirmStarted({required this.index}); + + factory ConfirmStarted._decode(_i1.Input input) { + return ConfirmStarted(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. 
+ final int index; + + @override + Map> toJson() => { + 'ConfirmStarted': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ConfirmStarted && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +class ConfirmAborted extends Event { + const ConfirmAborted({required this.index}); + + factory ConfirmAborted._decode(_i1.Input input) { + return ConfirmAborted(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + @override + Map> toJson() => { + 'ConfirmAborted': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ConfirmAborted && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// A referendum has ended its confirmation phase and is ready for approval. +class Confirmed extends Event { + const Confirmed({required this.index, required this.tally}); + + factory Confirmed._decode(_i1.Input input) { + return Confirmed(index: _i1.U32Codec.codec.decode(input), tally: _i5.Tally.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Tally + /// The final tally of votes in this referendum. 
+ final _i5.Tally tally; + + @override + Map> toJson() => { + 'Confirmed': {'index': index, 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Confirmed && other.index == index && other.tally == tally; + + @override + int get hashCode => Object.hash(index, tally); +} + +/// A referendum has been approved and its proposal has been scheduled. +class Approved extends Event { + const Approved({required this.index}); + + factory Approved._decode(_i1.Input input) { + return Approved(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + @override + Map> toJson() => { + 'Approved': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Approved && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// A proposal has been rejected by referendum. +class Rejected extends Event { + const Rejected({required this.index, required this.tally}); + + factory Rejected._decode(_i1.Input input) { + return Rejected(index: _i1.U32Codec.codec.decode(input), tally: _i5.Tally.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Tally + /// The final tally of votes in this referendum. 
+ final _i5.Tally tally; + + @override + Map> toJson() => { + 'Rejected': {'index': index, 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Rejected && other.index == index && other.tally == tally; + + @override + int get hashCode => Object.hash(index, tally); +} + +/// A referendum has been timed out without being decided. +class TimedOut extends Event { + const TimedOut({required this.index, required this.tally}); + + factory TimedOut._decode(_i1.Input input) { + return TimedOut(index: _i1.U32Codec.codec.decode(input), tally: _i5.Tally.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Tally + /// The final tally of votes in this referendum. + final _i5.Tally tally; + + @override + Map> toJson() => { + 'TimedOut': {'index': index, 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(10, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is TimedOut && other.index == index && other.tally == tally; + + @override + int get hashCode => Object.hash(index, tally); +} + +/// A referendum has been cancelled. 
+class Cancelled extends Event { + const Cancelled({required this.index, required this.tally}); + + factory Cancelled._decode(_i1.Input input) { + return Cancelled(index: _i1.U32Codec.codec.decode(input), tally: _i5.Tally.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Tally + /// The final tally of votes in this referendum. + final _i5.Tally tally; + + @override + Map> toJson() => { + 'Cancelled': {'index': index, 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(11, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Cancelled && other.index == index && other.tally == tally; + + @override + int get hashCode => Object.hash(index, tally); +} + +/// A referendum has been killed. +class Killed extends Event { + const Killed({required this.index, required this.tally}); + + factory Killed._decode(_i1.Input input) { + return Killed(index: _i1.U32Codec.codec.decode(input), tally: _i5.Tally.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Tally + /// The final tally of votes in this referendum. 
+ final _i5.Tally tally; + + @override + Map> toJson() => { + 'Killed': {'index': index, 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(12, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Killed && other.index == index && other.tally == tally; + + @override + int get hashCode => Object.hash(index, tally); +} + +/// The submission deposit has been refunded. +class SubmissionDepositRefunded extends Event { + const SubmissionDepositRefunded({required this.index, required this.who, required this.amount}); + + factory SubmissionDepositRefunded._decode(_i1.Input input) { + return SubmissionDepositRefunded( + index: _i1.U32Codec.codec.decode(input), + who: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::AccountId + /// The account who placed the deposit. + final _i4.AccountId32 who; + + /// BalanceOf + /// The amount placed by the account. 
+ final BigInt amount; + + @override + Map> toJson() => { + 'SubmissionDepositRefunded': {'index': index, 'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i4.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(13, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is SubmissionDepositRefunded && + other.index == index && + _i7.listsEqual(other.who, who) && + other.amount == amount; + + @override + int get hashCode => Object.hash(index, who, amount); +} + +/// Metadata for a referendum has been set. +class MetadataSet extends Event { + const MetadataSet({required this.index, required this.hash}); + + factory MetadataSet._decode(_i1.Input input) { + return MetadataSet(index: _i1.U32Codec.codec.decode(input), hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Hash + /// Preimage hash. 
+ final _i6.H256 hash; + + @override + Map> toJson() => { + 'MetadataSet': {'index': index, 'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i6.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(14, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is MetadataSet && other.index == index && _i7.listsEqual(other.hash, hash); + + @override + int get hashCode => Object.hash(index, hash); +} + +/// Metadata for a referendum has been cleared. +class MetadataCleared extends Event { + const MetadataCleared({required this.index, required this.hash}); + + factory MetadataCleared._decode(_i1.Input input) { + return MetadataCleared(index: _i1.U32Codec.codec.decode(input), hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Hash + /// Preimage hash. 
+ final _i6.H256 hash; + + @override + Map> toJson() => { + 'MetadataCleared': {'index': index, 'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i6.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(15, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is MetadataCleared && other.index == index && _i7.listsEqual(other.hash, hash); + + @override + int get hashCode => Object.hash(index, hash); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/event_2.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/event_2.dart new file mode 100644 index 00000000..4010a513 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/pallet/event_2.dart @@ -0,0 +1,985 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i7; + +import '../../frame_support/traits/preimages/bounded.dart' as _i3; +import '../../pallet_ranked_collective/tally.dart' as _i5; +import '../../primitive_types/h256.dart' as _i6; +import '../../sp_core/crypto/account_id32.dart' as _i4; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + Submitted 
submitted({required int index, required int track, required _i3.Bounded proposal}) { + return Submitted(index: index, track: track, proposal: proposal); + } + + DecisionDepositPlaced decisionDepositPlaced({ + required int index, + required _i4.AccountId32 who, + required BigInt amount, + }) { + return DecisionDepositPlaced(index: index, who: who, amount: amount); + } + + DecisionDepositRefunded decisionDepositRefunded({ + required int index, + required _i4.AccountId32 who, + required BigInt amount, + }) { + return DecisionDepositRefunded(index: index, who: who, amount: amount); + } + + DepositSlashed depositSlashed({required _i4.AccountId32 who, required BigInt amount}) { + return DepositSlashed(who: who, amount: amount); + } + + DecisionStarted decisionStarted({ + required int index, + required int track, + required _i3.Bounded proposal, + required _i5.Tally tally, + }) { + return DecisionStarted(index: index, track: track, proposal: proposal, tally: tally); + } + + ConfirmStarted confirmStarted({required int index}) { + return ConfirmStarted(index: index); + } + + ConfirmAborted confirmAborted({required int index}) { + return ConfirmAborted(index: index); + } + + Confirmed confirmed({required int index, required _i5.Tally tally}) { + return Confirmed(index: index, tally: tally); + } + + Approved approved({required int index}) { + return Approved(index: index); + } + + Rejected rejected({required int index, required _i5.Tally tally}) { + return Rejected(index: index, tally: tally); + } + + TimedOut timedOut({required int index, required _i5.Tally tally}) { + return TimedOut(index: index, tally: tally); + } + + Cancelled cancelled({required int index, required _i5.Tally tally}) { + return Cancelled(index: index, tally: tally); + } + + Killed killed({required int index, required _i5.Tally tally}) { + return Killed(index: index, tally: tally); + } + + SubmissionDepositRefunded submissionDepositRefunded({ + required int index, + required _i4.AccountId32 who, + 
required BigInt amount, + }) { + return SubmissionDepositRefunded(index: index, who: who, amount: amount); + } + + MetadataSet metadataSet({required int index, required _i6.H256 hash}) { + return MetadataSet(index: index, hash: hash); + } + + MetadataCleared metadataCleared({required int index, required _i6.H256 hash}) { + return MetadataCleared(index: index, hash: hash); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Submitted._decode(input); + case 1: + return DecisionDepositPlaced._decode(input); + case 2: + return DecisionDepositRefunded._decode(input); + case 3: + return DepositSlashed._decode(input); + case 4: + return DecisionStarted._decode(input); + case 5: + return ConfirmStarted._decode(input); + case 6: + return ConfirmAborted._decode(input); + case 7: + return Confirmed._decode(input); + case 8: + return Approved._decode(input); + case 9: + return Rejected._decode(input); + case 10: + return TimedOut._decode(input); + case 11: + return Cancelled._decode(input); + case 12: + return Killed._decode(input); + case 13: + return SubmissionDepositRefunded._decode(input); + case 14: + return MetadataSet._decode(input); + case 15: + return MetadataCleared._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case Submitted: + (value as Submitted).encodeTo(output); + break; + case DecisionDepositPlaced: + (value as DecisionDepositPlaced).encodeTo(output); + break; + case DecisionDepositRefunded: + (value as DecisionDepositRefunded).encodeTo(output); + break; + case DepositSlashed: + (value as DepositSlashed).encodeTo(output); + break; + case DecisionStarted: + (value as DecisionStarted).encodeTo(output); + break; + case ConfirmStarted: + (value as 
ConfirmStarted).encodeTo(output); + break; + case ConfirmAborted: + (value as ConfirmAborted).encodeTo(output); + break; + case Confirmed: + (value as Confirmed).encodeTo(output); + break; + case Approved: + (value as Approved).encodeTo(output); + break; + case Rejected: + (value as Rejected).encodeTo(output); + break; + case TimedOut: + (value as TimedOut).encodeTo(output); + break; + case Cancelled: + (value as Cancelled).encodeTo(output); + break; + case Killed: + (value as Killed).encodeTo(output); + break; + case SubmissionDepositRefunded: + (value as SubmissionDepositRefunded).encodeTo(output); + break; + case MetadataSet: + (value as MetadataSet).encodeTo(output); + break; + case MetadataCleared: + (value as MetadataCleared).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case Submitted: + return (value as Submitted)._sizeHint(); + case DecisionDepositPlaced: + return (value as DecisionDepositPlaced)._sizeHint(); + case DecisionDepositRefunded: + return (value as DecisionDepositRefunded)._sizeHint(); + case DepositSlashed: + return (value as DepositSlashed)._sizeHint(); + case DecisionStarted: + return (value as DecisionStarted)._sizeHint(); + case ConfirmStarted: + return (value as ConfirmStarted)._sizeHint(); + case ConfirmAborted: + return (value as ConfirmAborted)._sizeHint(); + case Confirmed: + return (value as Confirmed)._sizeHint(); + case Approved: + return (value as Approved)._sizeHint(); + case Rejected: + return (value as Rejected)._sizeHint(); + case TimedOut: + return (value as TimedOut)._sizeHint(); + case Cancelled: + return (value as Cancelled)._sizeHint(); + case Killed: + return (value as Killed)._sizeHint(); + case SubmissionDepositRefunded: + return (value as SubmissionDepositRefunded)._sizeHint(); + case MetadataSet: + return (value as MetadataSet)._sizeHint(); + case 
MetadataCleared: + return (value as MetadataCleared)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// A referendum has been submitted. +class Submitted extends Event { + const Submitted({required this.index, required this.track, required this.proposal}); + + factory Submitted._decode(_i1.Input input) { + return Submitted( + index: _i1.U32Codec.codec.decode(input), + track: _i1.U16Codec.codec.decode(input), + proposal: _i3.Bounded.codec.decode(input), + ); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// TrackIdOf + /// The track (and by extension proposal dispatch origin) of this referendum. + final int track; + + /// BoundedCallOf + /// The proposal for the referendum. + final _i3.Bounded proposal; + + @override + Map> toJson() => { + 'Submitted': {'index': index, 'track': track, 'proposal': proposal.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i1.U16Codec.codec.sizeHint(track); + size = size + _i3.Bounded.codec.sizeHint(proposal); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i1.U16Codec.codec.encodeTo(track, output); + _i3.Bounded.codec.encodeTo(proposal, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Submitted && other.index == index && other.track == track && other.proposal == proposal; + + @override + int get hashCode => Object.hash(index, track, proposal); +} + +/// The decision deposit has been placed. 
+class DecisionDepositPlaced extends Event { + const DecisionDepositPlaced({required this.index, required this.who, required this.amount}); + + factory DecisionDepositPlaced._decode(_i1.Input input) { + return DecisionDepositPlaced( + index: _i1.U32Codec.codec.decode(input), + who: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::AccountId + /// The account who placed the deposit. + final _i4.AccountId32 who; + + /// BalanceOf + /// The amount placed by the account. + final BigInt amount; + + @override + Map> toJson() => { + 'DecisionDepositPlaced': {'index': index, 'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i4.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DecisionDepositPlaced && + other.index == index && + _i7.listsEqual(other.who, who) && + other.amount == amount; + + @override + int get hashCode => Object.hash(index, who, amount); +} + +/// The decision deposit has been refunded. +class DecisionDepositRefunded extends Event { + const DecisionDepositRefunded({required this.index, required this.who, required this.amount}); + + factory DecisionDepositRefunded._decode(_i1.Input input) { + return DecisionDepositRefunded( + index: _i1.U32Codec.codec.decode(input), + who: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// ReferendumIndex + /// Index of the referendum. 
+ final int index; + + /// T::AccountId + /// The account who placed the deposit. + final _i4.AccountId32 who; + + /// BalanceOf + /// The amount placed by the account. + final BigInt amount; + + @override + Map> toJson() => { + 'DecisionDepositRefunded': {'index': index, 'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i4.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DecisionDepositRefunded && + other.index == index && + _i7.listsEqual(other.who, who) && + other.amount == amount; + + @override + int get hashCode => Object.hash(index, who, amount); +} + +/// A deposit has been slashed. +class DepositSlashed extends Event { + const DepositSlashed({required this.who, required this.amount}); + + factory DepositSlashed._decode(_i1.Input input) { + return DepositSlashed(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// T::AccountId + /// The account who placed the deposit. + final _i4.AccountId32 who; + + /// BalanceOf + /// The amount placed by the account. 
+ final BigInt amount; + + @override + Map> toJson() => { + 'DepositSlashed': {'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i4.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is DepositSlashed && _i7.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +/// A referendum has moved into the deciding phase. +class DecisionStarted extends Event { + const DecisionStarted({required this.index, required this.track, required this.proposal, required this.tally}); + + factory DecisionStarted._decode(_i1.Input input) { + return DecisionStarted( + index: _i1.U32Codec.codec.decode(input), + track: _i1.U16Codec.codec.decode(input), + proposal: _i3.Bounded.codec.decode(input), + tally: _i5.Tally.codec.decode(input), + ); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// TrackIdOf + /// The track (and by extension proposal dispatch origin) of this referendum. + final int track; + + /// BoundedCallOf + /// The proposal for the referendum. + final _i3.Bounded proposal; + + /// T::Tally + /// The current tally of votes in this referendum. 
+ final _i5.Tally tally; + + @override + Map> toJson() => { + 'DecisionStarted': {'index': index, 'track': track, 'proposal': proposal.toJson(), 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i1.U16Codec.codec.sizeHint(track); + size = size + _i3.Bounded.codec.sizeHint(proposal); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i1.U16Codec.codec.encodeTo(track, output); + _i3.Bounded.codec.encodeTo(proposal, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DecisionStarted && + other.index == index && + other.track == track && + other.proposal == proposal && + other.tally == tally; + + @override + int get hashCode => Object.hash(index, track, proposal, tally); +} + +class ConfirmStarted extends Event { + const ConfirmStarted({required this.index}); + + factory ConfirmStarted._decode(_i1.Input input) { + return ConfirmStarted(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. 
+ final int index; + + @override + Map> toJson() => { + 'ConfirmStarted': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ConfirmStarted && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +class ConfirmAborted extends Event { + const ConfirmAborted({required this.index}); + + factory ConfirmAborted._decode(_i1.Input input) { + return ConfirmAborted(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + @override + Map> toJson() => { + 'ConfirmAborted': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ConfirmAborted && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// A referendum has ended its confirmation phase and is ready for approval. +class Confirmed extends Event { + const Confirmed({required this.index, required this.tally}); + + factory Confirmed._decode(_i1.Input input) { + return Confirmed(index: _i1.U32Codec.codec.decode(input), tally: _i5.Tally.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Tally + /// The final tally of votes in this referendum. 
+ final _i5.Tally tally; + + @override + Map> toJson() => { + 'Confirmed': {'index': index, 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Confirmed && other.index == index && other.tally == tally; + + @override + int get hashCode => Object.hash(index, tally); +} + +/// A referendum has been approved and its proposal has been scheduled. +class Approved extends Event { + const Approved({required this.index}); + + factory Approved._decode(_i1.Input input) { + return Approved(index: _i1.U32Codec.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + @override + Map> toJson() => { + 'Approved': {'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Approved && other.index == index; + + @override + int get hashCode => index.hashCode; +} + +/// A proposal has been rejected by referendum. +class Rejected extends Event { + const Rejected({required this.index, required this.tally}); + + factory Rejected._decode(_i1.Input input) { + return Rejected(index: _i1.U32Codec.codec.decode(input), tally: _i5.Tally.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Tally + /// The final tally of votes in this referendum. 
+ final _i5.Tally tally; + + @override + Map> toJson() => { + 'Rejected': {'index': index, 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Rejected && other.index == index && other.tally == tally; + + @override + int get hashCode => Object.hash(index, tally); +} + +/// A referendum has been timed out without being decided. +class TimedOut extends Event { + const TimedOut({required this.index, required this.tally}); + + factory TimedOut._decode(_i1.Input input) { + return TimedOut(index: _i1.U32Codec.codec.decode(input), tally: _i5.Tally.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Tally + /// The final tally of votes in this referendum. + final _i5.Tally tally; + + @override + Map> toJson() => { + 'TimedOut': {'index': index, 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(10, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is TimedOut && other.index == index && other.tally == tally; + + @override + int get hashCode => Object.hash(index, tally); +} + +/// A referendum has been cancelled. 
+class Cancelled extends Event { + const Cancelled({required this.index, required this.tally}); + + factory Cancelled._decode(_i1.Input input) { + return Cancelled(index: _i1.U32Codec.codec.decode(input), tally: _i5.Tally.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Tally + /// The final tally of votes in this referendum. + final _i5.Tally tally; + + @override + Map> toJson() => { + 'Cancelled': {'index': index, 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(11, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Cancelled && other.index == index && other.tally == tally; + + @override + int get hashCode => Object.hash(index, tally); +} + +/// A referendum has been killed. +class Killed extends Event { + const Killed({required this.index, required this.tally}); + + factory Killed._decode(_i1.Input input) { + return Killed(index: _i1.U32Codec.codec.decode(input), tally: _i5.Tally.codec.decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Tally + /// The final tally of votes in this referendum. 
+ final _i5.Tally tally; + + @override + Map> toJson() => { + 'Killed': {'index': index, 'tally': tally.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i5.Tally.codec.sizeHint(tally); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(12, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i5.Tally.codec.encodeTo(tally, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Killed && other.index == index && other.tally == tally; + + @override + int get hashCode => Object.hash(index, tally); +} + +/// The submission deposit has been refunded. +class SubmissionDepositRefunded extends Event { + const SubmissionDepositRefunded({required this.index, required this.who, required this.amount}); + + factory SubmissionDepositRefunded._decode(_i1.Input input) { + return SubmissionDepositRefunded( + index: _i1.U32Codec.codec.decode(input), + who: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::AccountId + /// The account who placed the deposit. + final _i4.AccountId32 who; + + /// BalanceOf + /// The amount placed by the account. 
+ final BigInt amount; + + @override + Map> toJson() => { + 'SubmissionDepositRefunded': {'index': index, 'who': who.toList(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i4.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(13, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is SubmissionDepositRefunded && + other.index == index && + _i7.listsEqual(other.who, who) && + other.amount == amount; + + @override + int get hashCode => Object.hash(index, who, amount); +} + +/// Metadata for a referendum has been set. +class MetadataSet extends Event { + const MetadataSet({required this.index, required this.hash}); + + factory MetadataSet._decode(_i1.Input input) { + return MetadataSet(index: _i1.U32Codec.codec.decode(input), hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Hash + /// Preimage hash. 
+ final _i6.H256 hash; + + @override + Map> toJson() => { + 'MetadataSet': {'index': index, 'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i6.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(14, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is MetadataSet && other.index == index && _i7.listsEqual(other.hash, hash); + + @override + int get hashCode => Object.hash(index, hash); +} + +/// Metadata for a referendum has been cleared. +class MetadataCleared extends Event { + const MetadataCleared({required this.index, required this.hash}); + + factory MetadataCleared._decode(_i1.Input input) { + return MetadataCleared(index: _i1.U32Codec.codec.decode(input), hash: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// ReferendumIndex + /// Index of the referendum. + final int index; + + /// T::Hash + /// Preimage hash. 
+ final _i6.H256 hash; + + @override + Map> toJson() => { + 'MetadataCleared': {'index': index, 'hash': hash.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + const _i6.H256Codec().sizeHint(hash); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(15, output); + _i1.U32Codec.codec.encodeTo(index, output); + const _i1.U8ArrayCodec(32).encodeTo(hash, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is MetadataCleared && other.index == index && _i7.listsEqual(other.hash, hash); + + @override + int get hashCode => Object.hash(index, hash); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/curve.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/curve.dart new file mode 100644 index 00000000..79a32460 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/curve.dart @@ -0,0 +1,263 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../sp_arithmetic/fixed_point/fixed_i64.dart' as _i4; +import '../../sp_arithmetic/per_things/perbill.dart' as _i3; + +abstract class Curve { + const Curve(); + + factory Curve.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CurveCodec codec = $CurveCodec(); + + static const $Curve values = $Curve(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Curve { + const $Curve(); + + LinearDecreasing linearDecreasing({ + required _i3.Perbill length, + required _i3.Perbill floor, + required _i3.Perbill ceil, + }) { + return LinearDecreasing(length: length, floor: floor, ceil: ceil); + } + + 
SteppedDecreasing steppedDecreasing({ + required _i3.Perbill begin, + required _i3.Perbill end, + required _i3.Perbill step, + required _i3.Perbill period, + }) { + return SteppedDecreasing(begin: begin, end: end, step: step, period: period); + } + + Reciprocal reciprocal({required _i4.FixedI64 factor, required _i4.FixedI64 xOffset, required _i4.FixedI64 yOffset}) { + return Reciprocal(factor: factor, xOffset: xOffset, yOffset: yOffset); + } +} + +class $CurveCodec with _i1.Codec { + const $CurveCodec(); + + @override + Curve decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return LinearDecreasing._decode(input); + case 1: + return SteppedDecreasing._decode(input); + case 2: + return Reciprocal._decode(input); + default: + throw Exception('Curve: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Curve value, _i1.Output output) { + switch (value.runtimeType) { + case LinearDecreasing: + (value as LinearDecreasing).encodeTo(output); + break; + case SteppedDecreasing: + (value as SteppedDecreasing).encodeTo(output); + break; + case Reciprocal: + (value as Reciprocal).encodeTo(output); + break; + default: + throw Exception('Curve: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Curve value) { + switch (value.runtimeType) { + case LinearDecreasing: + return (value as LinearDecreasing)._sizeHint(); + case SteppedDecreasing: + return (value as SteppedDecreasing)._sizeHint(); + case Reciprocal: + return (value as Reciprocal)._sizeHint(); + default: + throw Exception('Curve: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class LinearDecreasing extends Curve { + const LinearDecreasing({required this.length, required this.floor, required this.ceil}); + + factory LinearDecreasing._decode(_i1.Input input) { + return LinearDecreasing( + length: _i1.U32Codec.codec.decode(input), + floor: _i1.U32Codec.codec.decode(input), + ceil: 
_i1.U32Codec.codec.decode(input), + ); + } + + /// Perbill + final _i3.Perbill length; + + /// Perbill + final _i3.Perbill floor; + + /// Perbill + final _i3.Perbill ceil; + + @override + Map> toJson() => { + 'LinearDecreasing': {'length': length, 'floor': floor, 'ceil': ceil}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.PerbillCodec().sizeHint(length); + size = size + const _i3.PerbillCodec().sizeHint(floor); + size = size + const _i3.PerbillCodec().sizeHint(ceil); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U32Codec.codec.encodeTo(length, output); + _i1.U32Codec.codec.encodeTo(floor, output); + _i1.U32Codec.codec.encodeTo(ceil, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is LinearDecreasing && other.length == length && other.floor == floor && other.ceil == ceil; + + @override + int get hashCode => Object.hash(length, floor, ceil); +} + +class SteppedDecreasing extends Curve { + const SteppedDecreasing({required this.begin, required this.end, required this.step, required this.period}); + + factory SteppedDecreasing._decode(_i1.Input input) { + return SteppedDecreasing( + begin: _i1.U32Codec.codec.decode(input), + end: _i1.U32Codec.codec.decode(input), + step: _i1.U32Codec.codec.decode(input), + period: _i1.U32Codec.codec.decode(input), + ); + } + + /// Perbill + final _i3.Perbill begin; + + /// Perbill + final _i3.Perbill end; + + /// Perbill + final _i3.Perbill step; + + /// Perbill + final _i3.Perbill period; + + @override + Map> toJson() => { + 'SteppedDecreasing': {'begin': begin, 'end': end, 'step': step, 'period': period}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.PerbillCodec().sizeHint(begin); + size = size + const _i3.PerbillCodec().sizeHint(end); + size = size + const _i3.PerbillCodec().sizeHint(step); + size = size + const _i3.PerbillCodec().sizeHint(period); + return size; + } + + void 
encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U32Codec.codec.encodeTo(begin, output); + _i1.U32Codec.codec.encodeTo(end, output); + _i1.U32Codec.codec.encodeTo(step, output); + _i1.U32Codec.codec.encodeTo(period, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is SteppedDecreasing && + other.begin == begin && + other.end == end && + other.step == step && + other.period == period; + + @override + int get hashCode => Object.hash(begin, end, step, period); +} + +class Reciprocal extends Curve { + const Reciprocal({required this.factor, required this.xOffset, required this.yOffset}); + + factory Reciprocal._decode(_i1.Input input) { + return Reciprocal( + factor: _i1.I64Codec.codec.decode(input), + xOffset: _i1.I64Codec.codec.decode(input), + yOffset: _i1.I64Codec.codec.decode(input), + ); + } + + /// FixedI64 + final _i4.FixedI64 factor; + + /// FixedI64 + final _i4.FixedI64 xOffset; + + /// FixedI64 + final _i4.FixedI64 yOffset; + + @override + Map> toJson() => { + 'Reciprocal': {'factor': factor, 'xOffset': xOffset, 'yOffset': yOffset}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i4.FixedI64Codec().sizeHint(factor); + size = size + const _i4.FixedI64Codec().sizeHint(xOffset); + size = size + const _i4.FixedI64Codec().sizeHint(yOffset); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.I64Codec.codec.encodeTo(factor, output); + _i1.I64Codec.codec.encodeTo(xOffset, output); + _i1.I64Codec.codec.encodeTo(yOffset, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Reciprocal && other.factor == factor && other.xOffset == xOffset && other.yOffset == yOffset; + + @override + int get hashCode => Object.hash(factor, xOffset, yOffset); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/deciding_status.dart 
b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/deciding_status.dart new file mode 100644 index 00000000..0330544f --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/deciding_status.dart @@ -0,0 +1,59 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +class DecidingStatus { + const DecidingStatus({required this.since, this.confirming}); + + factory DecidingStatus.decode(_i1.Input input) { + return codec.decode(input); + } + + /// BlockNumber + final int since; + + /// Option + final int? confirming; + + static const $DecidingStatusCodec codec = $DecidingStatusCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'since': since, 'confirming': confirming}; + + @override + bool operator ==(Object other) => + identical(this, other) || other is DecidingStatus && other.since == since && other.confirming == confirming; + + @override + int get hashCode => Object.hash(since, confirming); +} + +class $DecidingStatusCodec with _i1.Codec { + const $DecidingStatusCodec(); + + @override + void encodeTo(DecidingStatus obj, _i1.Output output) { + _i1.U32Codec.codec.encodeTo(obj.since, output); + const _i1.OptionCodec(_i1.U32Codec.codec).encodeTo(obj.confirming, output); + } + + @override + DecidingStatus decode(_i1.Input input) { + return DecidingStatus( + since: _i1.U32Codec.codec.decode(input), + confirming: const _i1.OptionCodec(_i1.U32Codec.codec).decode(input), + ); + } + + @override + int sizeHint(DecidingStatus obj) { + int size = 0; + size = size + _i1.U32Codec.codec.sizeHint(obj.since); + size = size + const _i1.OptionCodec(_i1.U32Codec.codec).sizeHint(obj.confirming); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/deposit.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/deposit.dart new file mode 100644 index 
00000000..7477eb42 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/deposit.dart @@ -0,0 +1,59 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../sp_core/crypto/account_id32.dart' as _i2; + +class Deposit { + const Deposit({required this.who, required this.amount}); + + factory Deposit.decode(_i1.Input input) { + return codec.decode(input); + } + + /// AccountId + final _i2.AccountId32 who; + + /// Balance + final BigInt amount; + + static const $DepositCodec codec = $DepositCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'who': who.toList(), 'amount': amount}; + + @override + bool operator ==(Object other) => + identical(this, other) || other is Deposit && _i4.listsEqual(other.who, who) && other.amount == amount; + + @override + int get hashCode => Object.hash(who, amount); +} + +class $DepositCodec with _i1.Codec { + const $DepositCodec(); + + @override + void encodeTo(Deposit obj, _i1.Output output) { + const _i1.U8ArrayCodec(32).encodeTo(obj.who, output); + _i1.U128Codec.codec.encodeTo(obj.amount, output); + } + + @override + Deposit decode(_i1.Input input) { + return Deposit(who: const _i1.U8ArrayCodec(32).decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + @override + int sizeHint(Deposit obj) { + int size = 0; + size = size + const _i2.AccountId32Codec().sizeHint(obj.who); + size = size + _i1.U128Codec.codec.sizeHint(obj.amount); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_info_1.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_info_1.dart new file mode 100644 index 00000000..a4fe6c9b --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_info_1.dart @@ -0,0 +1,389 @@ 
+// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import 'deposit.dart' as _i4; +import 'referendum_status_1.dart' as _i3; + +abstract class ReferendumInfo { + const ReferendumInfo(); + + factory ReferendumInfo.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $ReferendumInfoCodec codec = $ReferendumInfoCodec(); + + static const $ReferendumInfo values = $ReferendumInfo(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $ReferendumInfo { + const $ReferendumInfo(); + + Ongoing ongoing(_i3.ReferendumStatus value0) { + return Ongoing(value0); + } + + Approved approved(int value0, _i4.Deposit? value1, _i4.Deposit? value2) { + return Approved(value0, value1, value2); + } + + Rejected rejected(int value0, _i4.Deposit? value1, _i4.Deposit? value2) { + return Rejected(value0, value1, value2); + } + + Cancelled cancelled(int value0, _i4.Deposit? value1, _i4.Deposit? value2) { + return Cancelled(value0, value1, value2); + } + + TimedOut timedOut(int value0, _i4.Deposit? value1, _i4.Deposit? 
value2) { + return TimedOut(value0, value1, value2); + } + + Killed killed(int value0) { + return Killed(value0); + } +} + +class $ReferendumInfoCodec with _i1.Codec { + const $ReferendumInfoCodec(); + + @override + ReferendumInfo decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Ongoing._decode(input); + case 1: + return Approved._decode(input); + case 2: + return Rejected._decode(input); + case 3: + return Cancelled._decode(input); + case 4: + return TimedOut._decode(input); + case 5: + return Killed._decode(input); + default: + throw Exception('ReferendumInfo: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(ReferendumInfo value, _i1.Output output) { + switch (value.runtimeType) { + case Ongoing: + (value as Ongoing).encodeTo(output); + break; + case Approved: + (value as Approved).encodeTo(output); + break; + case Rejected: + (value as Rejected).encodeTo(output); + break; + case Cancelled: + (value as Cancelled).encodeTo(output); + break; + case TimedOut: + (value as TimedOut).encodeTo(output); + break; + case Killed: + (value as Killed).encodeTo(output); + break; + default: + throw Exception('ReferendumInfo: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(ReferendumInfo value) { + switch (value.runtimeType) { + case Ongoing: + return (value as Ongoing)._sizeHint(); + case Approved: + return (value as Approved)._sizeHint(); + case Rejected: + return (value as Rejected)._sizeHint(); + case Cancelled: + return (value as Cancelled)._sizeHint(); + case TimedOut: + return (value as TimedOut)._sizeHint(); + case Killed: + return (value as Killed)._sizeHint(); + default: + throw Exception('ReferendumInfo: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Ongoing extends ReferendumInfo { + const Ongoing(this.value0); + + factory Ongoing._decode(_i1.Input input) { + return 
Ongoing(_i3.ReferendumStatus.codec.decode(input)); + } + + /// ReferendumStatus + final _i3.ReferendumStatus value0; + + @override + Map> toJson() => {'Ongoing': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i3.ReferendumStatus.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.ReferendumStatus.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Ongoing && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Approved extends ReferendumInfo { + const Approved(this.value0, this.value1, this.value2); + + factory Approved._decode(_i1.Input input) { + return Approved( + _i1.U32Codec.codec.decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + ); + } + + /// Moment + final int value0; + + /// Option> + final _i4.Deposit? value1; + + /// Option> + final _i4.Deposit? 
value2; + + @override + Map> toJson() => { + 'Approved': [value0, value1?.toJson(), value2?.toJson()], + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value1); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value2); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U32Codec.codec.encodeTo(value0, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value1, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value2, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Approved && other.value0 == value0 && other.value1 == value1 && other.value2 == value2; + + @override + int get hashCode => Object.hash(value0, value1, value2); +} + +class Rejected extends ReferendumInfo { + const Rejected(this.value0, this.value1, this.value2); + + factory Rejected._decode(_i1.Input input) { + return Rejected( + _i1.U32Codec.codec.decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + ); + } + + /// Moment + final int value0; + + /// Option> + final _i4.Deposit? value1; + + /// Option> + final _i4.Deposit? 
value2; + + @override + Map> toJson() => { + 'Rejected': [value0, value1?.toJson(), value2?.toJson()], + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value1); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value2); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U32Codec.codec.encodeTo(value0, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value1, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value2, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Rejected && other.value0 == value0 && other.value1 == value1 && other.value2 == value2; + + @override + int get hashCode => Object.hash(value0, value1, value2); +} + +class Cancelled extends ReferendumInfo { + const Cancelled(this.value0, this.value1, this.value2); + + factory Cancelled._decode(_i1.Input input) { + return Cancelled( + _i1.U32Codec.codec.decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + ); + } + + /// Moment + final int value0; + + /// Option> + final _i4.Deposit? value1; + + /// Option> + final _i4.Deposit? 
value2; + + @override + Map> toJson() => { + 'Cancelled': [value0, value1?.toJson(), value2?.toJson()], + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value1); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value2); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i1.U32Codec.codec.encodeTo(value0, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value1, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value2, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Cancelled && other.value0 == value0 && other.value1 == value1 && other.value2 == value2; + + @override + int get hashCode => Object.hash(value0, value1, value2); +} + +class TimedOut extends ReferendumInfo { + const TimedOut(this.value0, this.value1, this.value2); + + factory TimedOut._decode(_i1.Input input) { + return TimedOut( + _i1.U32Codec.codec.decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + ); + } + + /// Moment + final int value0; + + /// Option> + final _i4.Deposit? value1; + + /// Option> + final _i4.Deposit? 
value2; + + @override + Map> toJson() => { + 'TimedOut': [value0, value1?.toJson(), value2?.toJson()], + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value1); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value2); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i1.U32Codec.codec.encodeTo(value0, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value1, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value2, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is TimedOut && other.value0 == value0 && other.value1 == value1 && other.value2 == value2; + + @override + int get hashCode => Object.hash(value0, value1, value2); +} + +class Killed extends ReferendumInfo { + const Killed(this.value0); + + factory Killed._decode(_i1.Input input) { + return Killed(_i1.U32Codec.codec.decode(input)); + } + + /// Moment + final int value0; + + @override + Map toJson() => {'Killed': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i1.U32Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Killed && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_info_2.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_info_2.dart new file mode 100644 index 00000000..90cca436 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_info_2.dart @@ -0,0 +1,389 @@ +// ignore_for_file: 
no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import 'deposit.dart' as _i4; +import 'referendum_status_2.dart' as _i3; + +abstract class ReferendumInfo { + const ReferendumInfo(); + + factory ReferendumInfo.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $ReferendumInfoCodec codec = $ReferendumInfoCodec(); + + static const $ReferendumInfo values = $ReferendumInfo(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $ReferendumInfo { + const $ReferendumInfo(); + + Ongoing ongoing(_i3.ReferendumStatus value0) { + return Ongoing(value0); + } + + Approved approved(int value0, _i4.Deposit? value1, _i4.Deposit? value2) { + return Approved(value0, value1, value2); + } + + Rejected rejected(int value0, _i4.Deposit? value1, _i4.Deposit? value2) { + return Rejected(value0, value1, value2); + } + + Cancelled cancelled(int value0, _i4.Deposit? value1, _i4.Deposit? value2) { + return Cancelled(value0, value1, value2); + } + + TimedOut timedOut(int value0, _i4.Deposit? value1, _i4.Deposit? 
value2) { + return TimedOut(value0, value1, value2); + } + + Killed killed(int value0) { + return Killed(value0); + } +} + +class $ReferendumInfoCodec with _i1.Codec { + const $ReferendumInfoCodec(); + + @override + ReferendumInfo decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Ongoing._decode(input); + case 1: + return Approved._decode(input); + case 2: + return Rejected._decode(input); + case 3: + return Cancelled._decode(input); + case 4: + return TimedOut._decode(input); + case 5: + return Killed._decode(input); + default: + throw Exception('ReferendumInfo: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(ReferendumInfo value, _i1.Output output) { + switch (value.runtimeType) { + case Ongoing: + (value as Ongoing).encodeTo(output); + break; + case Approved: + (value as Approved).encodeTo(output); + break; + case Rejected: + (value as Rejected).encodeTo(output); + break; + case Cancelled: + (value as Cancelled).encodeTo(output); + break; + case TimedOut: + (value as TimedOut).encodeTo(output); + break; + case Killed: + (value as Killed).encodeTo(output); + break; + default: + throw Exception('ReferendumInfo: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(ReferendumInfo value) { + switch (value.runtimeType) { + case Ongoing: + return (value as Ongoing)._sizeHint(); + case Approved: + return (value as Approved)._sizeHint(); + case Rejected: + return (value as Rejected)._sizeHint(); + case Cancelled: + return (value as Cancelled)._sizeHint(); + case TimedOut: + return (value as TimedOut)._sizeHint(); + case Killed: + return (value as Killed)._sizeHint(); + default: + throw Exception('ReferendumInfo: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Ongoing extends ReferendumInfo { + const Ongoing(this.value0); + + factory Ongoing._decode(_i1.Input input) { + return 
Ongoing(_i3.ReferendumStatus.codec.decode(input)); + } + + /// ReferendumStatus + final _i3.ReferendumStatus value0; + + @override + Map> toJson() => {'Ongoing': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i3.ReferendumStatus.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.ReferendumStatus.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Ongoing && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Approved extends ReferendumInfo { + const Approved(this.value0, this.value1, this.value2); + + factory Approved._decode(_i1.Input input) { + return Approved( + _i1.U32Codec.codec.decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + ); + } + + /// Moment + final int value0; + + /// Option> + final _i4.Deposit? value1; + + /// Option> + final _i4.Deposit? 
value2; + + @override + Map> toJson() => { + 'Approved': [value0, value1?.toJson(), value2?.toJson()], + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value1); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value2); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U32Codec.codec.encodeTo(value0, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value1, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value2, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Approved && other.value0 == value0 && other.value1 == value1 && other.value2 == value2; + + @override + int get hashCode => Object.hash(value0, value1, value2); +} + +class Rejected extends ReferendumInfo { + const Rejected(this.value0, this.value1, this.value2); + + factory Rejected._decode(_i1.Input input) { + return Rejected( + _i1.U32Codec.codec.decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + ); + } + + /// Moment + final int value0; + + /// Option> + final _i4.Deposit? value1; + + /// Option> + final _i4.Deposit? 
value2; + + @override + Map> toJson() => { + 'Rejected': [value0, value1?.toJson(), value2?.toJson()], + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value1); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value2); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U32Codec.codec.encodeTo(value0, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value1, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value2, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Rejected && other.value0 == value0 && other.value1 == value1 && other.value2 == value2; + + @override + int get hashCode => Object.hash(value0, value1, value2); +} + +class Cancelled extends ReferendumInfo { + const Cancelled(this.value0, this.value1, this.value2); + + factory Cancelled._decode(_i1.Input input) { + return Cancelled( + _i1.U32Codec.codec.decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + ); + } + + /// Moment + final int value0; + + /// Option> + final _i4.Deposit? value1; + + /// Option> + final _i4.Deposit? 
value2; + + @override + Map> toJson() => { + 'Cancelled': [value0, value1?.toJson(), value2?.toJson()], + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value1); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value2); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i1.U32Codec.codec.encodeTo(value0, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value1, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value2, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Cancelled && other.value0 == value0 && other.value1 == value1 && other.value2 == value2; + + @override + int get hashCode => Object.hash(value0, value1, value2); +} + +class TimedOut extends ReferendumInfo { + const TimedOut(this.value0, this.value1, this.value2); + + factory TimedOut._decode(_i1.Input input) { + return TimedOut( + _i1.U32Codec.codec.decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).decode(input), + ); + } + + /// Moment + final int value0; + + /// Option> + final _i4.Deposit? value1; + + /// Option> + final _i4.Deposit? 
value2; + + @override + Map> toJson() => { + 'TimedOut': [value0, value1?.toJson(), value2?.toJson()], + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value1); + size = size + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).sizeHint(value2); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i1.U32Codec.codec.encodeTo(value0, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value1, output); + const _i1.OptionCodec<_i4.Deposit>(_i4.Deposit.codec).encodeTo(value2, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is TimedOut && other.value0 == value0 && other.value1 == value1 && other.value2 == value2; + + @override + int get hashCode => Object.hash(value0, value1, value2); +} + +class Killed extends ReferendumInfo { + const Killed(this.value0); + + factory Killed._decode(_i1.Input input) { + return Killed(_i1.U32Codec.codec.decode(input)); + } + + /// Moment + final int value0; + + @override + Map toJson() => {'Killed': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i1.U32Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Killed && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_status_1.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_status_1.dart new file mode 100644 index 00000000..8bd8a9f7 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_status_1.dart @@ -0,0 +1,191 @@ +// ignore_for_file: 
no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i11; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../frame_support/traits/preimages/bounded.dart' as _i3; +import '../../frame_support/traits/schedule/dispatch_time.dart' as _i4; +import '../../pallet_conviction_voting/types/tally.dart' as _i7; +import '../../qp_scheduler/block_number_or_timestamp.dart' as _i10; +import '../../quantus_runtime/origin_caller.dart' as _i2; +import '../../tuples.dart' as _i8; +import '../../tuples_1.dart' as _i9; +import 'deciding_status.dart' as _i6; +import 'deposit.dart' as _i5; + +class ReferendumStatus { + const ReferendumStatus({ + required this.track, + required this.origin, + required this.proposal, + required this.enactment, + required this.submitted, + required this.submissionDeposit, + this.decisionDeposit, + this.deciding, + required this.tally, + required this.inQueue, + this.alarm, + }); + + factory ReferendumStatus.decode(_i1.Input input) { + return codec.decode(input); + } + + /// TrackId + final int track; + + /// RuntimeOrigin + final _i2.OriginCaller origin; + + /// Call + final _i3.Bounded proposal; + + /// DispatchTime + final _i4.DispatchTime enactment; + + /// Moment + final int submitted; + + /// Deposit + final _i5.Deposit submissionDeposit; + + /// Option> + final _i5.Deposit? decisionDeposit; + + /// Option> + final _i6.DecidingStatus? deciding; + + /// Tally + final _i7.Tally tally; + + /// bool + final bool inQueue; + + /// Option<(Moment, ScheduleAddress)> + final _i8.Tuple2>? 
alarm; + + static const $ReferendumStatusCodec codec = $ReferendumStatusCodec(); + + _i11.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'track': track, + 'origin': origin.toJson(), + 'proposal': proposal.toJson(), + 'enactment': enactment.toJson(), + 'submitted': submitted, + 'submissionDeposit': submissionDeposit.toJson(), + 'decisionDeposit': decisionDeposit?.toJson(), + 'deciding': deciding?.toJson(), + 'tally': tally.toJson(), + 'inQueue': inQueue, + 'alarm': [ + alarm?.value0, + [alarm?.value1.value0.toJson(), alarm?.value1.value1], + ], + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ReferendumStatus && + other.track == track && + other.origin == origin && + other.proposal == proposal && + other.enactment == enactment && + other.submitted == submitted && + other.submissionDeposit == submissionDeposit && + other.decisionDeposit == decisionDeposit && + other.deciding == deciding && + other.tally == tally && + other.inQueue == inQueue && + other.alarm == alarm; + + @override + int get hashCode => Object.hash( + track, + origin, + proposal, + enactment, + submitted, + submissionDeposit, + decisionDeposit, + deciding, + tally, + inQueue, + alarm, + ); +} + +class $ReferendumStatusCodec with _i1.Codec { + const $ReferendumStatusCodec(); + + @override + void encodeTo(ReferendumStatus obj, _i1.Output output) { + _i1.U16Codec.codec.encodeTo(obj.track, output); + _i2.OriginCaller.codec.encodeTo(obj.origin, output); + _i3.Bounded.codec.encodeTo(obj.proposal, output); + _i4.DispatchTime.codec.encodeTo(obj.enactment, output); + _i1.U32Codec.codec.encodeTo(obj.submitted, output); + _i5.Deposit.codec.encodeTo(obj.submissionDeposit, output); + const _i1.OptionCodec<_i5.Deposit>(_i5.Deposit.codec).encodeTo(obj.decisionDeposit, output); + const _i1.OptionCodec<_i6.DecidingStatus>(_i6.DecidingStatus.codec).encodeTo(obj.deciding, output); + _i7.Tally.codec.encodeTo(obj.tally, output); + 
_i1.BoolCodec.codec.encodeTo(obj.inQueue, output); + const _i1.OptionCodec<_i8.Tuple2>>( + _i8.Tuple2Codec>( + _i1.U32Codec.codec, + _i9.Tuple2Codec<_i10.BlockNumberOrTimestamp, int>(_i10.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ), + ).encodeTo(obj.alarm, output); + } + + @override + ReferendumStatus decode(_i1.Input input) { + return ReferendumStatus( + track: _i1.U16Codec.codec.decode(input), + origin: _i2.OriginCaller.codec.decode(input), + proposal: _i3.Bounded.codec.decode(input), + enactment: _i4.DispatchTime.codec.decode(input), + submitted: _i1.U32Codec.codec.decode(input), + submissionDeposit: _i5.Deposit.codec.decode(input), + decisionDeposit: const _i1.OptionCodec<_i5.Deposit>(_i5.Deposit.codec).decode(input), + deciding: const _i1.OptionCodec<_i6.DecidingStatus>(_i6.DecidingStatus.codec).decode(input), + tally: _i7.Tally.codec.decode(input), + inQueue: _i1.BoolCodec.codec.decode(input), + alarm: const _i1.OptionCodec<_i8.Tuple2>>( + _i8.Tuple2Codec>( + _i1.U32Codec.codec, + _i9.Tuple2Codec<_i10.BlockNumberOrTimestamp, int>(_i10.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ), + ).decode(input), + ); + } + + @override + int sizeHint(ReferendumStatus obj) { + int size = 0; + size = size + _i1.U16Codec.codec.sizeHint(obj.track); + size = size + _i2.OriginCaller.codec.sizeHint(obj.origin); + size = size + _i3.Bounded.codec.sizeHint(obj.proposal); + size = size + _i4.DispatchTime.codec.sizeHint(obj.enactment); + size = size + _i1.U32Codec.codec.sizeHint(obj.submitted); + size = size + _i5.Deposit.codec.sizeHint(obj.submissionDeposit); + size = size + const _i1.OptionCodec<_i5.Deposit>(_i5.Deposit.codec).sizeHint(obj.decisionDeposit); + size = size + const _i1.OptionCodec<_i6.DecidingStatus>(_i6.DecidingStatus.codec).sizeHint(obj.deciding); + size = size + _i7.Tally.codec.sizeHint(obj.tally); + size = size + _i1.BoolCodec.codec.sizeHint(obj.inQueue); + size = + size + + const _i1.OptionCodec<_i8.Tuple2>>( + _i8.Tuple2Codec>( + 
_i1.U32Codec.codec, + _i9.Tuple2Codec<_i10.BlockNumberOrTimestamp, int>(_i10.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ), + ).sizeHint(obj.alarm); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_status_2.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_status_2.dart new file mode 100644 index 00000000..80e244fd --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/referendum_status_2.dart @@ -0,0 +1,191 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i11; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../frame_support/traits/preimages/bounded.dart' as _i3; +import '../../frame_support/traits/schedule/dispatch_time.dart' as _i4; +import '../../pallet_ranked_collective/tally.dart' as _i7; +import '../../qp_scheduler/block_number_or_timestamp.dart' as _i10; +import '../../quantus_runtime/origin_caller.dart' as _i2; +import '../../tuples.dart' as _i8; +import '../../tuples_1.dart' as _i9; +import 'deciding_status.dart' as _i6; +import 'deposit.dart' as _i5; + +class ReferendumStatus { + const ReferendumStatus({ + required this.track, + required this.origin, + required this.proposal, + required this.enactment, + required this.submitted, + required this.submissionDeposit, + this.decisionDeposit, + this.deciding, + required this.tally, + required this.inQueue, + this.alarm, + }); + + factory ReferendumStatus.decode(_i1.Input input) { + return codec.decode(input); + } + + /// TrackId + final int track; + + /// RuntimeOrigin + final _i2.OriginCaller origin; + + /// Call + final _i3.Bounded proposal; + + /// DispatchTime + final _i4.DispatchTime enactment; + + /// Moment + final int submitted; + + /// Deposit + final _i5.Deposit submissionDeposit; + + /// Option> + final _i5.Deposit? decisionDeposit; + + /// Option> + final _i6.DecidingStatus? 
deciding; + + /// Tally + final _i7.Tally tally; + + /// bool + final bool inQueue; + + /// Option<(Moment, ScheduleAddress)> + final _i8.Tuple2>? alarm; + + static const $ReferendumStatusCodec codec = $ReferendumStatusCodec(); + + _i11.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'track': track, + 'origin': origin.toJson(), + 'proposal': proposal.toJson(), + 'enactment': enactment.toJson(), + 'submitted': submitted, + 'submissionDeposit': submissionDeposit.toJson(), + 'decisionDeposit': decisionDeposit?.toJson(), + 'deciding': deciding?.toJson(), + 'tally': tally.toJson(), + 'inQueue': inQueue, + 'alarm': [ + alarm?.value0, + [alarm?.value1.value0.toJson(), alarm?.value1.value1], + ], + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ReferendumStatus && + other.track == track && + other.origin == origin && + other.proposal == proposal && + other.enactment == enactment && + other.submitted == submitted && + other.submissionDeposit == submissionDeposit && + other.decisionDeposit == decisionDeposit && + other.deciding == deciding && + other.tally == tally && + other.inQueue == inQueue && + other.alarm == alarm; + + @override + int get hashCode => Object.hash( + track, + origin, + proposal, + enactment, + submitted, + submissionDeposit, + decisionDeposit, + deciding, + tally, + inQueue, + alarm, + ); +} + +class $ReferendumStatusCodec with _i1.Codec { + const $ReferendumStatusCodec(); + + @override + void encodeTo(ReferendumStatus obj, _i1.Output output) { + _i1.U16Codec.codec.encodeTo(obj.track, output); + _i2.OriginCaller.codec.encodeTo(obj.origin, output); + _i3.Bounded.codec.encodeTo(obj.proposal, output); + _i4.DispatchTime.codec.encodeTo(obj.enactment, output); + _i1.U32Codec.codec.encodeTo(obj.submitted, output); + _i5.Deposit.codec.encodeTo(obj.submissionDeposit, output); + const _i1.OptionCodec<_i5.Deposit>(_i5.Deposit.codec).encodeTo(obj.decisionDeposit, output); + const 
_i1.OptionCodec<_i6.DecidingStatus>(_i6.DecidingStatus.codec).encodeTo(obj.deciding, output); + _i7.Tally.codec.encodeTo(obj.tally, output); + _i1.BoolCodec.codec.encodeTo(obj.inQueue, output); + const _i1.OptionCodec<_i8.Tuple2>>( + _i8.Tuple2Codec>( + _i1.U32Codec.codec, + _i9.Tuple2Codec<_i10.BlockNumberOrTimestamp, int>(_i10.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ), + ).encodeTo(obj.alarm, output); + } + + @override + ReferendumStatus decode(_i1.Input input) { + return ReferendumStatus( + track: _i1.U16Codec.codec.decode(input), + origin: _i2.OriginCaller.codec.decode(input), + proposal: _i3.Bounded.codec.decode(input), + enactment: _i4.DispatchTime.codec.decode(input), + submitted: _i1.U32Codec.codec.decode(input), + submissionDeposit: _i5.Deposit.codec.decode(input), + decisionDeposit: const _i1.OptionCodec<_i5.Deposit>(_i5.Deposit.codec).decode(input), + deciding: const _i1.OptionCodec<_i6.DecidingStatus>(_i6.DecidingStatus.codec).decode(input), + tally: _i7.Tally.codec.decode(input), + inQueue: _i1.BoolCodec.codec.decode(input), + alarm: const _i1.OptionCodec<_i8.Tuple2>>( + _i8.Tuple2Codec>( + _i1.U32Codec.codec, + _i9.Tuple2Codec<_i10.BlockNumberOrTimestamp, int>(_i10.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ), + ).decode(input), + ); + } + + @override + int sizeHint(ReferendumStatus obj) { + int size = 0; + size = size + _i1.U16Codec.codec.sizeHint(obj.track); + size = size + _i2.OriginCaller.codec.sizeHint(obj.origin); + size = size + _i3.Bounded.codec.sizeHint(obj.proposal); + size = size + _i4.DispatchTime.codec.sizeHint(obj.enactment); + size = size + _i1.U32Codec.codec.sizeHint(obj.submitted); + size = size + _i5.Deposit.codec.sizeHint(obj.submissionDeposit); + size = size + const _i1.OptionCodec<_i5.Deposit>(_i5.Deposit.codec).sizeHint(obj.decisionDeposit); + size = size + const _i1.OptionCodec<_i6.DecidingStatus>(_i6.DecidingStatus.codec).sizeHint(obj.deciding); + size = size + _i7.Tally.codec.sizeHint(obj.tally); + size 
= size + _i1.BoolCodec.codec.sizeHint(obj.inQueue); + size = + size + + const _i1.OptionCodec<_i8.Tuple2>>( + _i8.Tuple2Codec>( + _i1.U32Codec.codec, + _i9.Tuple2Codec<_i10.BlockNumberOrTimestamp, int>(_i10.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ), + ).sizeHint(obj.alarm); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/track_details.dart b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/track_details.dart new file mode 100644 index 00000000..fe353521 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_referenda/types/track_details.dart @@ -0,0 +1,143 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import 'curve.dart' as _i2; + +class TrackDetails { + const TrackDetails({ + required this.name, + required this.maxDeciding, + required this.decisionDeposit, + required this.preparePeriod, + required this.decisionPeriod, + required this.confirmPeriod, + required this.minEnactmentPeriod, + required this.minApproval, + required this.minSupport, + }); + + factory TrackDetails.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Name + final String name; + + /// u32 + final int maxDeciding; + + /// Balance + final BigInt decisionDeposit; + + /// Moment + final int preparePeriod; + + /// Moment + final int decisionPeriod; + + /// Moment + final int confirmPeriod; + + /// Moment + final int minEnactmentPeriod; + + /// Curve + final _i2.Curve minApproval; + + /// Curve + final _i2.Curve minSupport; + + static const $TrackDetailsCodec codec = $TrackDetailsCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'name': name, + 'maxDeciding': maxDeciding, + 'decisionDeposit': decisionDeposit, + 'preparePeriod': preparePeriod, + 'decisionPeriod': decisionPeriod, + 'confirmPeriod': confirmPeriod, + 'minEnactmentPeriod': 
minEnactmentPeriod, + 'minApproval': minApproval.toJson(), + 'minSupport': minSupport.toJson(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is TrackDetails && + other.name == name && + other.maxDeciding == maxDeciding && + other.decisionDeposit == decisionDeposit && + other.preparePeriod == preparePeriod && + other.decisionPeriod == decisionPeriod && + other.confirmPeriod == confirmPeriod && + other.minEnactmentPeriod == minEnactmentPeriod && + other.minApproval == minApproval && + other.minSupport == minSupport; + + @override + int get hashCode => Object.hash( + name, + maxDeciding, + decisionDeposit, + preparePeriod, + decisionPeriod, + confirmPeriod, + minEnactmentPeriod, + minApproval, + minSupport, + ); +} + +class $TrackDetailsCodec with _i1.Codec { + const $TrackDetailsCodec(); + + @override + void encodeTo(TrackDetails obj, _i1.Output output) { + _i1.StrCodec.codec.encodeTo(obj.name, output); + _i1.U32Codec.codec.encodeTo(obj.maxDeciding, output); + _i1.U128Codec.codec.encodeTo(obj.decisionDeposit, output); + _i1.U32Codec.codec.encodeTo(obj.preparePeriod, output); + _i1.U32Codec.codec.encodeTo(obj.decisionPeriod, output); + _i1.U32Codec.codec.encodeTo(obj.confirmPeriod, output); + _i1.U32Codec.codec.encodeTo(obj.minEnactmentPeriod, output); + _i2.Curve.codec.encodeTo(obj.minApproval, output); + _i2.Curve.codec.encodeTo(obj.minSupport, output); + } + + @override + TrackDetails decode(_i1.Input input) { + return TrackDetails( + name: _i1.StrCodec.codec.decode(input), + maxDeciding: _i1.U32Codec.codec.decode(input), + decisionDeposit: _i1.U128Codec.codec.decode(input), + preparePeriod: _i1.U32Codec.codec.decode(input), + decisionPeriod: _i1.U32Codec.codec.decode(input), + confirmPeriod: _i1.U32Codec.codec.decode(input), + minEnactmentPeriod: _i1.U32Codec.codec.decode(input), + minApproval: _i2.Curve.codec.decode(input), + minSupport: _i2.Curve.codec.decode(input), + ); + } + + @override + int sizeHint(TrackDetails 
obj) { + int size = 0; + size = size + _i1.StrCodec.codec.sizeHint(obj.name); + size = size + _i1.U32Codec.codec.sizeHint(obj.maxDeciding); + size = size + _i1.U128Codec.codec.sizeHint(obj.decisionDeposit); + size = size + _i1.U32Codec.codec.sizeHint(obj.preparePeriod); + size = size + _i1.U32Codec.codec.sizeHint(obj.decisionPeriod); + size = size + _i1.U32Codec.codec.sizeHint(obj.confirmPeriod); + size = size + _i1.U32Codec.codec.sizeHint(obj.minEnactmentPeriod); + size = size + _i2.Curve.codec.sizeHint(obj.minApproval); + size = size + _i2.Curve.codec.sizeHint(obj.minSupport); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/high_security_account_data.dart b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/high_security_account_data.dart new file mode 100644 index 00000000..34d86c1e --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/high_security_account_data.dart @@ -0,0 +1,64 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i4; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import '../qp_scheduler/block_number_or_timestamp.dart' as _i3; +import '../sp_core/crypto/account_id32.dart' as _i2; + +class HighSecurityAccountData { + const HighSecurityAccountData({required this.interceptor, required this.delay}); + + factory HighSecurityAccountData.decode(_i1.Input input) { + return codec.decode(input); + } + + /// AccountId + final _i2.AccountId32 interceptor; + + /// Delay + final _i3.BlockNumberOrTimestamp delay; + + static const $HighSecurityAccountDataCodec codec = $HighSecurityAccountDataCodec(); + + _i4.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'interceptor': interceptor.toList(), 'delay': delay.toJson()}; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is HighSecurityAccountData 
&& _i5.listsEqual(other.interceptor, interceptor) && other.delay == delay; + + @override + int get hashCode => Object.hash(interceptor, delay); +} + +class $HighSecurityAccountDataCodec with _i1.Codec { + const $HighSecurityAccountDataCodec(); + + @override + void encodeTo(HighSecurityAccountData obj, _i1.Output output) { + const _i1.U8ArrayCodec(32).encodeTo(obj.interceptor, output); + _i3.BlockNumberOrTimestamp.codec.encodeTo(obj.delay, output); + } + + @override + HighSecurityAccountData decode(_i1.Input input) { + return HighSecurityAccountData( + interceptor: const _i1.U8ArrayCodec(32).decode(input), + delay: _i3.BlockNumberOrTimestamp.codec.decode(input), + ); + } + + @override + int sizeHint(HighSecurityAccountData obj) { + int size = 0; + size = size + const _i2.AccountId32Codec().sizeHint(obj.interceptor); + size = size + _i3.BlockNumberOrTimestamp.codec.sizeHint(obj.delay); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/call.dart new file mode 100644 index 00000000..8972870c --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/call.dart @@ -0,0 +1,552 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i7; + +import '../../primitive_types/h256.dart' as _i5; +import '../../qp_scheduler/block_number_or_timestamp.dart' as _i3; +import '../../sp_core/crypto/account_id32.dart' as _i4; +import '../../sp_runtime/multiaddress/multi_address.dart' as _i6; + +/// Contains a variant per dispatchable extrinsic that this pallet has. 
+abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + SetHighSecurity setHighSecurity({required _i3.BlockNumberOrTimestamp delay, required _i4.AccountId32 interceptor}) { + return SetHighSecurity(delay: delay, interceptor: interceptor); + } + + Cancel cancel({required _i5.H256 txId}) { + return Cancel(txId: txId); + } + + ExecuteTransfer executeTransfer({required _i5.H256 txId}) { + return ExecuteTransfer(txId: txId); + } + + ScheduleTransfer scheduleTransfer({required _i6.MultiAddress dest, required BigInt amount}) { + return ScheduleTransfer(dest: dest, amount: amount); + } + + ScheduleTransferWithDelay scheduleTransferWithDelay({ + required _i6.MultiAddress dest, + required BigInt amount, + required _i3.BlockNumberOrTimestamp delay, + }) { + return ScheduleTransferWithDelay(dest: dest, amount: amount, delay: delay); + } + + ScheduleAssetTransfer scheduleAssetTransfer({ + required int assetId, + required _i6.MultiAddress dest, + required BigInt amount, + }) { + return ScheduleAssetTransfer(assetId: assetId, dest: dest, amount: amount); + } + + ScheduleAssetTransferWithDelay scheduleAssetTransferWithDelay({ + required int assetId, + required _i6.MultiAddress dest, + required BigInt amount, + required _i3.BlockNumberOrTimestamp delay, + }) { + return ScheduleAssetTransferWithDelay(assetId: assetId, dest: dest, amount: amount, delay: delay); + } + + RecoverFunds recoverFunds({required _i4.AccountId32 account}) { + return RecoverFunds(account: account); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input 
input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return SetHighSecurity._decode(input); + case 1: + return Cancel._decode(input); + case 2: + return ExecuteTransfer._decode(input); + case 3: + return ScheduleTransfer._decode(input); + case 4: + return ScheduleTransferWithDelay._decode(input); + case 5: + return ScheduleAssetTransfer._decode(input); + case 6: + return ScheduleAssetTransferWithDelay._decode(input); + case 7: + return RecoverFunds._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case SetHighSecurity: + (value as SetHighSecurity).encodeTo(output); + break; + case Cancel: + (value as Cancel).encodeTo(output); + break; + case ExecuteTransfer: + (value as ExecuteTransfer).encodeTo(output); + break; + case ScheduleTransfer: + (value as ScheduleTransfer).encodeTo(output); + break; + case ScheduleTransferWithDelay: + (value as ScheduleTransferWithDelay).encodeTo(output); + break; + case ScheduleAssetTransfer: + (value as ScheduleAssetTransfer).encodeTo(output); + break; + case ScheduleAssetTransferWithDelay: + (value as ScheduleAssetTransferWithDelay).encodeTo(output); + break; + case RecoverFunds: + (value as RecoverFunds).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case SetHighSecurity: + return (value as SetHighSecurity)._sizeHint(); + case Cancel: + return (value as Cancel)._sizeHint(); + case ExecuteTransfer: + return (value as ExecuteTransfer)._sizeHint(); + case ScheduleTransfer: + return (value as ScheduleTransfer)._sizeHint(); + case ScheduleTransferWithDelay: + return (value as ScheduleTransferWithDelay)._sizeHint(); + case ScheduleAssetTransfer: + return (value as ScheduleAssetTransfer)._sizeHint(); + case 
ScheduleAssetTransferWithDelay: + return (value as ScheduleAssetTransferWithDelay)._sizeHint(); + case RecoverFunds: + return (value as RecoverFunds)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Enable high-security for the calling account with a specified +/// reversibility delay. +/// +/// Recoverer and interceptor (aka guardian) could be the same account or +/// different accounts. +/// +/// Once an account is set as high security it can only make reversible +/// transfers. It is not allowed any other calls. +/// +/// - `delay`: The reversibility time for any transfer made by the high +/// security account. +/// - interceptor: The account that can intercept transctions from the +/// high security account. +class SetHighSecurity extends Call { + const SetHighSecurity({required this.delay, required this.interceptor}); + + factory SetHighSecurity._decode(_i1.Input input) { + return SetHighSecurity( + delay: _i3.BlockNumberOrTimestamp.codec.decode(input), + interceptor: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// BlockNumberOrTimestampOf + final _i3.BlockNumberOrTimestamp delay; + + /// T::AccountId + final _i4.AccountId32 interceptor; + + @override + Map> toJson() => { + 'set_high_security': {'delay': delay.toJson(), 'interceptor': interceptor.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.BlockNumberOrTimestamp.codec.sizeHint(delay); + size = size + const _i4.AccountId32Codec().sizeHint(interceptor); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.BlockNumberOrTimestamp.codec.encodeTo(delay, output); + const _i1.U8ArrayCodec(32).encodeTo(interceptor, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is SetHighSecurity && other.delay == delay && _i7.listsEqual(other.interceptor, interceptor); + + @override + int get hashCode => 
Object.hash(delay, interceptor); +} + +/// Cancel a pending reversible transaction scheduled by the caller. +/// +/// - `tx_id`: The unique identifier of the transaction to cancel. +class Cancel extends Call { + const Cancel({required this.txId}); + + factory Cancel._decode(_i1.Input input) { + return Cancel(txId: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::Hash + final _i5.H256 txId; + + @override + Map>> toJson() => { + 'cancel': {'txId': txId.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i5.H256Codec().sizeHint(txId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(txId, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Cancel && _i7.listsEqual(other.txId, txId); + + @override + int get hashCode => txId.hashCode; +} + +/// Called by the Scheduler to finalize the scheduled task/call +/// +/// - `tx_id`: The unique id of the transaction to finalize and dispatch. +class ExecuteTransfer extends Call { + const ExecuteTransfer({required this.txId}); + + factory ExecuteTransfer._decode(_i1.Input input) { + return ExecuteTransfer(txId: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::Hash + final _i5.H256 txId; + + @override + Map>> toJson() => { + 'execute_transfer': {'txId': txId.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i5.H256Codec().sizeHint(txId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(txId, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is ExecuteTransfer && _i7.listsEqual(other.txId, txId); + + @override + int get hashCode => txId.hashCode; +} + +/// Schedule a transaction for delayed execution. 
+class ScheduleTransfer extends Call { + const ScheduleTransfer({required this.dest, required this.amount}); + + factory ScheduleTransfer._decode(_i1.Input input) { + return ScheduleTransfer(dest: _i6.MultiAddress.codec.decode(input), amount: _i1.U128Codec.codec.decode(input)); + } + + /// <::Lookup as StaticLookup>::Source + final _i6.MultiAddress dest; + + /// BalanceOf + final BigInt amount; + + @override + Map> toJson() => { + 'schedule_transfer': {'dest': dest.toJson(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i6.MultiAddress.codec.sizeHint(dest); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i6.MultiAddress.codec.encodeTo(dest, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is ScheduleTransfer && other.dest == dest && other.amount == amount; + + @override + int get hashCode => Object.hash(dest, amount); +} + +/// Schedule a transaction for delayed execution with a custom, one-time delay. +/// +/// This can only be used by accounts that have *not* set up a persistent +/// reversibility configuration with `set_high_security`. +/// +/// - `delay`: The time (in blocks or milliseconds) before the transaction executes. 
+class ScheduleTransferWithDelay extends Call { + const ScheduleTransferWithDelay({required this.dest, required this.amount, required this.delay}); + + factory ScheduleTransferWithDelay._decode(_i1.Input input) { + return ScheduleTransferWithDelay( + dest: _i6.MultiAddress.codec.decode(input), + amount: _i1.U128Codec.codec.decode(input), + delay: _i3.BlockNumberOrTimestamp.codec.decode(input), + ); + } + + /// <::Lookup as StaticLookup>::Source + final _i6.MultiAddress dest; + + /// BalanceOf + final BigInt amount; + + /// BlockNumberOrTimestampOf + final _i3.BlockNumberOrTimestamp delay; + + @override + Map> toJson() => { + 'schedule_transfer_with_delay': {'dest': dest.toJson(), 'amount': amount, 'delay': delay.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i6.MultiAddress.codec.sizeHint(dest); + size = size + _i1.U128Codec.codec.sizeHint(amount); + size = size + _i3.BlockNumberOrTimestamp.codec.sizeHint(delay); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i6.MultiAddress.codec.encodeTo(dest, output); + _i1.U128Codec.codec.encodeTo(amount, output); + _i3.BlockNumberOrTimestamp.codec.encodeTo(delay, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ScheduleTransferWithDelay && other.dest == dest && other.amount == amount && other.delay == delay; + + @override + int get hashCode => Object.hash(dest, amount, delay); +} + +/// Schedule an asset transfer (pallet-assets) for delayed execution using the configured +/// delay. 
+class ScheduleAssetTransfer extends Call { + const ScheduleAssetTransfer({required this.assetId, required this.dest, required this.amount}); + + factory ScheduleAssetTransfer._decode(_i1.Input input) { + return ScheduleAssetTransfer( + assetId: _i1.U32Codec.codec.decode(input), + dest: _i6.MultiAddress.codec.decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + /// AssetIdOf + final int assetId; + + /// <::Lookup as StaticLookup>::Source + final _i6.MultiAddress dest; + + /// BalanceOf + final BigInt amount; + + @override + Map> toJson() => { + 'schedule_asset_transfer': {'assetId': assetId, 'dest': dest.toJson(), 'amount': amount}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + _i6.MultiAddress.codec.sizeHint(dest); + size = size + _i1.U128Codec.codec.sizeHint(amount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + _i6.MultiAddress.codec.encodeTo(dest, output); + _i1.U128Codec.codec.encodeTo(amount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ScheduleAssetTransfer && other.assetId == assetId && other.dest == dest && other.amount == amount; + + @override + int get hashCode => Object.hash(assetId, dest, amount); +} + +/// Schedule an asset transfer (pallet-assets) with a custom one-time delay. 
+class ScheduleAssetTransferWithDelay extends Call { + const ScheduleAssetTransferWithDelay({ + required this.assetId, + required this.dest, + required this.amount, + required this.delay, + }); + + factory ScheduleAssetTransferWithDelay._decode(_i1.Input input) { + return ScheduleAssetTransferWithDelay( + assetId: _i1.U32Codec.codec.decode(input), + dest: _i6.MultiAddress.codec.decode(input), + amount: _i1.U128Codec.codec.decode(input), + delay: _i3.BlockNumberOrTimestamp.codec.decode(input), + ); + } + + /// AssetIdOf + final int assetId; + + /// <::Lookup as StaticLookup>::Source + final _i6.MultiAddress dest; + + /// BalanceOf + final BigInt amount; + + /// BlockNumberOrTimestampOf + final _i3.BlockNumberOrTimestamp delay; + + @override + Map> toJson() => { + 'schedule_asset_transfer_with_delay': { + 'assetId': assetId, + 'dest': dest.toJson(), + 'amount': amount, + 'delay': delay.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + _i6.MultiAddress.codec.sizeHint(dest); + size = size + _i1.U128Codec.codec.sizeHint(amount); + size = size + _i3.BlockNumberOrTimestamp.codec.sizeHint(delay); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + _i6.MultiAddress.codec.encodeTo(dest, output); + _i1.U128Codec.codec.encodeTo(amount, output); + _i3.BlockNumberOrTimestamp.codec.encodeTo(delay, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ScheduleAssetTransferWithDelay && + other.assetId == assetId && + other.dest == dest && + other.amount == amount && + other.delay == delay; + + @override + int get hashCode => Object.hash(assetId, dest, amount, delay); +} + +/// Allows the guardian (interceptor) to recover all funds from a high security +/// account by transferring the entire balance to themselves. 
+/// +/// This is an emergency function for when the high security account may be compromised. +class RecoverFunds extends Call { + const RecoverFunds({required this.account}); + + factory RecoverFunds._decode(_i1.Input input) { + return RecoverFunds(account: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i4.AccountId32 account; + + @override + Map>> toJson() => { + 'recover_funds': {'account': account.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i4.AccountId32Codec().sizeHint(account); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + const _i1.U8ArrayCodec(32).encodeTo(account, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is RecoverFunds && _i7.listsEqual(other.account, account); + + @override + int get hashCode => account.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/error.dart new file mode 100644 index 00000000..fe4a30c2 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/error.dart @@ -0,0 +1,124 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + /// The account attempting to enable reversibility is already marked as reversible. + accountAlreadyHighSecurity('AccountAlreadyHighSecurity', 0), + + /// The account attempting the action is not marked as high security. + accountNotHighSecurity('AccountNotHighSecurity', 1), + + /// Interceptor can not be the account itself, because it is redundant. + interceptorCannotBeSelf('InterceptorCannotBeSelf', 2), + + /// Recoverer cannot be the account itself, because it is redundant. 
+ recovererCannotBeSelf('RecovererCannotBeSelf', 3), + + /// The specified pending transaction ID was not found. + pendingTxNotFound('PendingTxNotFound', 4), + + /// The caller is not the original submitter of the transaction they are trying to cancel. + notOwner('NotOwner', 5), + + /// The account has reached the maximum number of pending reversible transactions. + tooManyPendingTransactions('TooManyPendingTransactions', 6), + + /// The specified delay period is below the configured minimum. + delayTooShort('DelayTooShort', 7), + + /// Failed to schedule the transaction execution with the scheduler pallet. + schedulingFailed('SchedulingFailed', 8), + + /// Failed to cancel the scheduled task with the scheduler pallet. + cancellationFailed('CancellationFailed', 9), + + /// Failed to decode the OpaqueCall back into a RuntimeCall. + callDecodingFailed('CallDecodingFailed', 10), + + /// Call is invalid. + invalidCall('InvalidCall', 11), + + /// Invalid scheduler origin + invalidSchedulerOrigin('InvalidSchedulerOrigin', 12), + + /// Reverser is invalid + invalidReverser('InvalidReverser', 13), + + /// Cannot schedule one time reversible transaction when account is reversible (theft + /// deterrence) + accountAlreadyReversibleCannotScheduleOneTime('AccountAlreadyReversibleCannotScheduleOneTime', 14), + + /// The interceptor has reached the maximum number of accounts they can intercept for. 
+ tooManyInterceptorAccounts('TooManyInterceptorAccounts', 15); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.accountAlreadyHighSecurity; + case 1: + return Error.accountNotHighSecurity; + case 2: + return Error.interceptorCannotBeSelf; + case 3: + return Error.recovererCannotBeSelf; + case 4: + return Error.pendingTxNotFound; + case 5: + return Error.notOwner; + case 6: + return Error.tooManyPendingTransactions; + case 7: + return Error.delayTooShort; + case 8: + return Error.schedulingFailed; + case 9: + return Error.cancellationFailed; + case 10: + return Error.callDecodingFailed; + case 11: + return Error.invalidCall; + case 12: + return Error.invalidSchedulerOrigin; + case 13: + return Error.invalidReverser; + case 14: + return Error.accountAlreadyReversibleCannotScheduleOneTime; + case 15: + return Error.tooManyInterceptorAccounts; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/event.dart new file mode 100644 index 00000000..6d0203f2 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/event.dart @@ -0,0 +1,443 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as 
_i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i9; + +import '../../frame_support/dispatch/post_dispatch_info.dart' as _i7; +import '../../primitive_types/h256.dart' as _i5; +import '../../qp_scheduler/block_number_or_timestamp.dart' as _i4; +import '../../qp_scheduler/dispatch_time.dart' as _i6; +import '../../sp_core/crypto/account_id32.dart' as _i3; +import '../../sp_runtime/dispatch_error_with_post_info.dart' as _i8; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + HighSecuritySet highSecuritySet({ + required _i3.AccountId32 who, + required _i3.AccountId32 interceptor, + required _i4.BlockNumberOrTimestamp delay, + }) { + return HighSecuritySet(who: who, interceptor: interceptor, delay: delay); + } + + TransactionScheduled transactionScheduled({ + required _i3.AccountId32 from, + required _i3.AccountId32 to, + required _i3.AccountId32 interceptor, + int? 
assetId, + required BigInt amount, + required _i5.H256 txId, + required _i6.DispatchTime executeAt, + }) { + return TransactionScheduled( + from: from, + to: to, + interceptor: interceptor, + assetId: assetId, + amount: amount, + txId: txId, + executeAt: executeAt, + ); + } + + TransactionCancelled transactionCancelled({required _i3.AccountId32 who, required _i5.H256 txId}) { + return TransactionCancelled(who: who, txId: txId); + } + + TransactionExecuted transactionExecuted({ + required _i5.H256 txId, + required _i1.Result<_i7.PostDispatchInfo, _i8.DispatchErrorWithPostInfo> result, + }) { + return TransactionExecuted(txId: txId, result: result); + } + + FundsRecovered fundsRecovered({required _i3.AccountId32 account, required _i3.AccountId32 guardian}) { + return FundsRecovered(account: account, guardian: guardian); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return HighSecuritySet._decode(input); + case 1: + return TransactionScheduled._decode(input); + case 2: + return TransactionCancelled._decode(input); + case 3: + return TransactionExecuted._decode(input); + case 4: + return FundsRecovered._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case HighSecuritySet: + (value as HighSecuritySet).encodeTo(output); + break; + case TransactionScheduled: + (value as TransactionScheduled).encodeTo(output); + break; + case TransactionCancelled: + (value as TransactionCancelled).encodeTo(output); + break; + case TransactionExecuted: + (value as TransactionExecuted).encodeTo(output); + break; + case FundsRecovered: + (value as FundsRecovered).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + 
int sizeHint(Event value) { + switch (value.runtimeType) { + case HighSecuritySet: + return (value as HighSecuritySet)._sizeHint(); + case TransactionScheduled: + return (value as TransactionScheduled)._sizeHint(); + case TransactionCancelled: + return (value as TransactionCancelled)._sizeHint(); + case TransactionExecuted: + return (value as TransactionExecuted)._sizeHint(); + case FundsRecovered: + return (value as FundsRecovered)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// A user has enabled their high-security settings. +/// [who, interceptor, recoverer, delay] +class HighSecuritySet extends Event { + const HighSecuritySet({required this.who, required this.interceptor, required this.delay}); + + factory HighSecuritySet._decode(_i1.Input input) { + return HighSecuritySet( + who: const _i1.U8ArrayCodec(32).decode(input), + interceptor: const _i1.U8ArrayCodec(32).decode(input), + delay: _i4.BlockNumberOrTimestamp.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::AccountId + final _i3.AccountId32 interceptor; + + /// BlockNumberOrTimestampOf + final _i4.BlockNumberOrTimestamp delay; + + @override + Map> toJson() => { + 'HighSecuritySet': {'who': who.toList(), 'interceptor': interceptor.toList(), 'delay': delay.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + const _i3.AccountId32Codec().sizeHint(interceptor); + size = size + _i4.BlockNumberOrTimestamp.codec.sizeHint(delay); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + const _i1.U8ArrayCodec(32).encodeTo(interceptor, output); + _i4.BlockNumberOrTimestamp.codec.encodeTo(delay, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is HighSecuritySet && + 
_i9.listsEqual(other.who, who) && + _i9.listsEqual(other.interceptor, interceptor) && + other.delay == delay; + + @override + int get hashCode => Object.hash(who, interceptor, delay); +} + +/// A transaction has been intercepted and scheduled for delayed execution. +/// [from, to, interceptor, amount, tx_id, execute_at_moment] +class TransactionScheduled extends Event { + const TransactionScheduled({ + required this.from, + required this.to, + required this.interceptor, + this.assetId, + required this.amount, + required this.txId, + required this.executeAt, + }); + + factory TransactionScheduled._decode(_i1.Input input) { + return TransactionScheduled( + from: const _i1.U8ArrayCodec(32).decode(input), + to: const _i1.U8ArrayCodec(32).decode(input), + interceptor: const _i1.U8ArrayCodec(32).decode(input), + assetId: const _i1.OptionCodec(_i1.U32Codec.codec).decode(input), + amount: _i1.U128Codec.codec.decode(input), + txId: const _i1.U8ArrayCodec(32).decode(input), + executeAt: _i6.DispatchTime.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 from; + + /// T::AccountId + final _i3.AccountId32 to; + + /// T::AccountId + final _i3.AccountId32 interceptor; + + /// Option> + final int? 
assetId; + + /// BalanceOf + final BigInt amount; + + /// T::Hash + final _i5.H256 txId; + + /// DispatchTime, T::Moment> + final _i6.DispatchTime executeAt; + + @override + Map> toJson() => { + 'TransactionScheduled': { + 'from': from.toList(), + 'to': to.toList(), + 'interceptor': interceptor.toList(), + 'assetId': assetId, + 'amount': amount, + 'txId': txId.toList(), + 'executeAt': executeAt.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(from); + size = size + const _i3.AccountId32Codec().sizeHint(to); + size = size + const _i3.AccountId32Codec().sizeHint(interceptor); + size = size + const _i1.OptionCodec(_i1.U32Codec.codec).sizeHint(assetId); + size = size + _i1.U128Codec.codec.sizeHint(amount); + size = size + const _i5.H256Codec().sizeHint(txId); + size = size + _i6.DispatchTime.codec.sizeHint(executeAt); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.U8ArrayCodec(32).encodeTo(from, output); + const _i1.U8ArrayCodec(32).encodeTo(to, output); + const _i1.U8ArrayCodec(32).encodeTo(interceptor, output); + const _i1.OptionCodec(_i1.U32Codec.codec).encodeTo(assetId, output); + _i1.U128Codec.codec.encodeTo(amount, output); + const _i1.U8ArrayCodec(32).encodeTo(txId, output); + _i6.DispatchTime.codec.encodeTo(executeAt, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is TransactionScheduled && + _i9.listsEqual(other.from, from) && + _i9.listsEqual(other.to, to) && + _i9.listsEqual(other.interceptor, interceptor) && + other.assetId == assetId && + other.amount == amount && + _i9.listsEqual(other.txId, txId) && + other.executeAt == executeAt; + + @override + int get hashCode => Object.hash(from, to, interceptor, assetId, amount, txId, executeAt); +} + +/// A scheduled transaction has been successfully cancelled by the owner. 
+class TransactionCancelled extends Event { + const TransactionCancelled({required this.who, required this.txId}); + + factory TransactionCancelled._decode(_i1.Input input) { + return TransactionCancelled( + who: const _i1.U8ArrayCodec(32).decode(input), + txId: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// T::Hash + final _i5.H256 txId; + + @override + Map>> toJson() => { + 'TransactionCancelled': {'who': who.toList(), 'txId': txId.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + const _i5.H256Codec().sizeHint(txId); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + const _i1.U8ArrayCodec(32).encodeTo(txId, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is TransactionCancelled && _i9.listsEqual(other.who, who) && _i9.listsEqual(other.txId, txId); + + @override + int get hashCode => Object.hash(who, txId); +} + +/// A scheduled transaction was executed by the scheduler. 
+class TransactionExecuted extends Event { + const TransactionExecuted({required this.txId, required this.result}); + + factory TransactionExecuted._decode(_i1.Input input) { + return TransactionExecuted( + txId: const _i1.U8ArrayCodec(32).decode(input), + result: const _i1.ResultCodec<_i7.PostDispatchInfo, _i8.DispatchErrorWithPostInfo>( + _i7.PostDispatchInfo.codec, + _i8.DispatchErrorWithPostInfo.codec, + ).decode(input), + ); + } + + /// T::Hash + final _i5.H256 txId; + + /// DispatchResultWithPostInfo + final _i1.Result<_i7.PostDispatchInfo, _i8.DispatchErrorWithPostInfo> result; + + @override + Map> toJson() => { + 'TransactionExecuted': {'txId': txId.toList(), 'result': result.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i5.H256Codec().sizeHint(txId); + size = + size + + const _i1.ResultCodec<_i7.PostDispatchInfo, _i8.DispatchErrorWithPostInfo>( + _i7.PostDispatchInfo.codec, + _i8.DispatchErrorWithPostInfo.codec, + ).sizeHint(result); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(txId, output); + const _i1.ResultCodec<_i7.PostDispatchInfo, _i8.DispatchErrorWithPostInfo>( + _i7.PostDispatchInfo.codec, + _i8.DispatchErrorWithPostInfo.codec, + ).encodeTo(result, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is TransactionExecuted && _i9.listsEqual(other.txId, txId) && other.result == result; + + @override + int get hashCode => Object.hash(txId, result); +} + +/// Funds were recovered from a high security account by its guardian. 
+class FundsRecovered extends Event { + const FundsRecovered({required this.account, required this.guardian}); + + factory FundsRecovered._decode(_i1.Input input) { + return FundsRecovered( + account: const _i1.U8ArrayCodec(32).decode(input), + guardian: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 account; + + /// T::AccountId + final _i3.AccountId32 guardian; + + @override + Map>> toJson() => { + 'FundsRecovered': {'account': account.toList(), 'guardian': guardian.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(account); + size = size + const _i3.AccountId32Codec().sizeHint(guardian); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.U8ArrayCodec(32).encodeTo(account, output); + const _i1.U8ArrayCodec(32).encodeTo(guardian, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is FundsRecovered && _i9.listsEqual(other.account, account) && _i9.listsEqual(other.guardian, guardian); + + @override + int get hashCode => Object.hash(account, guardian); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/hold_reason.dart b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/hold_reason.dart new file mode 100644 index 00000000..12bf569f --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pallet/hold_reason.dart @@ -0,0 +1,46 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum HoldReason { + scheduledTransfer('ScheduledTransfer', 0); + + const HoldReason(this.variantName, this.codecIndex); + + factory HoldReason.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const 
$HoldReasonCodec codec = $HoldReasonCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $HoldReasonCodec with _i1.Codec { + const $HoldReasonCodec(); + + @override + HoldReason decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return HoldReason.scheduledTransfer; + default: + throw Exception('HoldReason: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(HoldReason value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pending_transfer.dart b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pending_transfer.dart new file mode 100644 index 00000000..823f72db --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_reversible_transfers/pending_transfer.dart @@ -0,0 +1,99 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i4; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import '../frame_support/traits/preimages/bounded.dart' as _i3; +import '../sp_core/crypto/account_id32.dart' as _i2; + +class PendingTransfer { + const PendingTransfer({ + required this.from, + required this.to, + required this.interceptor, + required this.call, + required this.amount, + }); + + factory PendingTransfer.decode(_i1.Input input) { + return codec.decode(input); + } + + /// AccountId + final _i2.AccountId32 from; + + /// AccountId + final _i2.AccountId32 to; + + /// AccountId + final _i2.AccountId32 interceptor; + + /// Call + final _i3.Bounded call; + + /// Balance + final BigInt amount; + + static const $PendingTransferCodec codec = $PendingTransferCodec(); + + _i4.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'from': from.toList(), + 'to': to.toList(), + 
'interceptor': interceptor.toList(), + 'call': call.toJson(), + 'amount': amount, + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is PendingTransfer && + _i5.listsEqual(other.from, from) && + _i5.listsEqual(other.to, to) && + _i5.listsEqual(other.interceptor, interceptor) && + other.call == call && + other.amount == amount; + + @override + int get hashCode => Object.hash(from, to, interceptor, call, amount); +} + +class $PendingTransferCodec with _i1.Codec { + const $PendingTransferCodec(); + + @override + void encodeTo(PendingTransfer obj, _i1.Output output) { + const _i1.U8ArrayCodec(32).encodeTo(obj.from, output); + const _i1.U8ArrayCodec(32).encodeTo(obj.to, output); + const _i1.U8ArrayCodec(32).encodeTo(obj.interceptor, output); + _i3.Bounded.codec.encodeTo(obj.call, output); + _i1.U128Codec.codec.encodeTo(obj.amount, output); + } + + @override + PendingTransfer decode(_i1.Input input) { + return PendingTransfer( + from: const _i1.U8ArrayCodec(32).decode(input), + to: const _i1.U8ArrayCodec(32).decode(input), + interceptor: const _i1.U8ArrayCodec(32).decode(input), + call: _i3.Bounded.codec.decode(input), + amount: _i1.U128Codec.codec.decode(input), + ); + } + + @override + int sizeHint(PendingTransfer obj) { + int size = 0; + size = size + const _i2.AccountId32Codec().sizeHint(obj.from); + size = size + const _i2.AccountId32Codec().sizeHint(obj.to); + size = size + const _i2.AccountId32Codec().sizeHint(obj.interceptor); + size = size + _i3.Bounded.codec.sizeHint(obj.call); + size = size + _i1.U128Codec.codec.sizeHint(obj.amount); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_scheduler/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_scheduler/pallet/call.dart new file mode 100644 index 00000000..92a790d4 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_scheduler/pallet/call.dart @@ -0,0 +1,821 @@ +// ignore_for_file: 
no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i6; + +import '../../qp_scheduler/block_number_or_timestamp.dart' as _i4; +import '../../quantus_runtime/runtime_call.dart' as _i5; +import '../../tuples_1.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. +abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + Schedule schedule({ + required int when, + _i3.Tuple2<_i4.BlockNumberOrTimestamp, int>? maybePeriodic, + required int priority, + required _i5.RuntimeCall call, + }) { + return Schedule(when: when, maybePeriodic: maybePeriodic, priority: priority, call: call); + } + + Cancel cancel({required _i4.BlockNumberOrTimestamp when, required int index}) { + return Cancel(when: when, index: index); + } + + ScheduleNamed scheduleNamed({ + required List id, + required int when, + _i3.Tuple2<_i4.BlockNumberOrTimestamp, int>? maybePeriodic, + required int priority, + required _i5.RuntimeCall call, + }) { + return ScheduleNamed(id: id, when: when, maybePeriodic: maybePeriodic, priority: priority, call: call); + } + + CancelNamed cancelNamed({required List id}) { + return CancelNamed(id: id); + } + + ScheduleAfter scheduleAfter({ + required _i4.BlockNumberOrTimestamp after, + _i3.Tuple2<_i4.BlockNumberOrTimestamp, int>? 
maybePeriodic, + required int priority, + required _i5.RuntimeCall call, + }) { + return ScheduleAfter(after: after, maybePeriodic: maybePeriodic, priority: priority, call: call); + } + + ScheduleNamedAfter scheduleNamedAfter({ + required List id, + required _i4.BlockNumberOrTimestamp after, + _i3.Tuple2<_i4.BlockNumberOrTimestamp, int>? maybePeriodic, + required int priority, + required _i5.RuntimeCall call, + }) { + return ScheduleNamedAfter(id: id, after: after, maybePeriodic: maybePeriodic, priority: priority, call: call); + } + + SetRetry setRetry({ + required _i3.Tuple2<_i4.BlockNumberOrTimestamp, int> task, + required int retries, + required _i4.BlockNumberOrTimestamp period, + }) { + return SetRetry(task: task, retries: retries, period: period); + } + + SetRetryNamed setRetryNamed({ + required List id, + required int retries, + required _i4.BlockNumberOrTimestamp period, + }) { + return SetRetryNamed(id: id, retries: retries, period: period); + } + + CancelRetry cancelRetry({required _i3.Tuple2<_i4.BlockNumberOrTimestamp, int> task}) { + return CancelRetry(task: task); + } + + CancelRetryNamed cancelRetryNamed({required List id}) { + return CancelRetryNamed(id: id); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Schedule._decode(input); + case 1: + return Cancel._decode(input); + case 2: + return ScheduleNamed._decode(input); + case 3: + return CancelNamed._decode(input); + case 4: + return ScheduleAfter._decode(input); + case 5: + return ScheduleNamedAfter._decode(input); + case 6: + return SetRetry._decode(input); + case 7: + return SetRetryNamed._decode(input); + case 8: + return CancelRetry._decode(input); + case 9: + return CancelRetryNamed._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + 
switch (value.runtimeType) { + case Schedule: + (value as Schedule).encodeTo(output); + break; + case Cancel: + (value as Cancel).encodeTo(output); + break; + case ScheduleNamed: + (value as ScheduleNamed).encodeTo(output); + break; + case CancelNamed: + (value as CancelNamed).encodeTo(output); + break; + case ScheduleAfter: + (value as ScheduleAfter).encodeTo(output); + break; + case ScheduleNamedAfter: + (value as ScheduleNamedAfter).encodeTo(output); + break; + case SetRetry: + (value as SetRetry).encodeTo(output); + break; + case SetRetryNamed: + (value as SetRetryNamed).encodeTo(output); + break; + case CancelRetry: + (value as CancelRetry).encodeTo(output); + break; + case CancelRetryNamed: + (value as CancelRetryNamed).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case Schedule: + return (value as Schedule)._sizeHint(); + case Cancel: + return (value as Cancel)._sizeHint(); + case ScheduleNamed: + return (value as ScheduleNamed)._sizeHint(); + case CancelNamed: + return (value as CancelNamed)._sizeHint(); + case ScheduleAfter: + return (value as ScheduleAfter)._sizeHint(); + case ScheduleNamedAfter: + return (value as ScheduleNamedAfter)._sizeHint(); + case SetRetry: + return (value as SetRetry)._sizeHint(); + case SetRetryNamed: + return (value as SetRetryNamed)._sizeHint(); + case CancelRetry: + return (value as CancelRetry)._sizeHint(); + case CancelRetryNamed: + return (value as CancelRetryNamed)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Anonymously schedule a task. 
+class Schedule extends Call { + const Schedule({required this.when, this.maybePeriodic, required this.priority, required this.call}); + + factory Schedule._decode(_i1.Input input) { + return Schedule( + when: _i1.U32Codec.codec.decode(input), + maybePeriodic: const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).decode(input), + priority: _i1.U8Codec.codec.decode(input), + call: _i5.RuntimeCall.codec.decode(input), + ); + } + + /// BlockNumberFor + final int when; + + /// Option, T::Moment>> + final _i3.Tuple2<_i4.BlockNumberOrTimestamp, int>? maybePeriodic; + + /// schedule::Priority + final int priority; + + /// Box<::RuntimeCall> + final _i5.RuntimeCall call; + + @override + Map> toJson() => { + 'schedule': { + 'when': when, + 'maybePeriodic': [maybePeriodic?.value0.toJson(), maybePeriodic?.value1], + 'priority': priority, + 'call': call.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(when); + size = + size + + const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).sizeHint(maybePeriodic); + size = size + _i1.U8Codec.codec.sizeHint(priority); + size = size + _i5.RuntimeCall.codec.sizeHint(call); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U32Codec.codec.encodeTo(when, output); + const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).encodeTo(maybePeriodic, output); + _i1.U8Codec.codec.encodeTo(priority, output); + _i5.RuntimeCall.codec.encodeTo(call, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Schedule && + other.when == when && 
+ other.maybePeriodic == maybePeriodic && + other.priority == priority && + other.call == call; + + @override + int get hashCode => Object.hash(when, maybePeriodic, priority, call); +} + +/// Cancel an anonymously scheduled task. +class Cancel extends Call { + const Cancel({required this.when, required this.index}); + + factory Cancel._decode(_i1.Input input) { + return Cancel(when: _i4.BlockNumberOrTimestamp.codec.decode(input), index: _i1.U32Codec.codec.decode(input)); + } + + /// BlockNumberOrTimestampOf + final _i4.BlockNumberOrTimestamp when; + + /// u32 + final int index; + + @override + Map> toJson() => { + 'cancel': {'when': when.toJson(), 'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i4.BlockNumberOrTimestamp.codec.sizeHint(when); + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i4.BlockNumberOrTimestamp.codec.encodeTo(when, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Cancel && other.when == when && other.index == index; + + @override + int get hashCode => Object.hash(when, index); +} + +/// Schedule a named task. 
+class ScheduleNamed extends Call { + const ScheduleNamed({ + required this.id, + required this.when, + this.maybePeriodic, + required this.priority, + required this.call, + }); + + factory ScheduleNamed._decode(_i1.Input input) { + return ScheduleNamed( + id: const _i1.U8ArrayCodec(32).decode(input), + when: _i1.U32Codec.codec.decode(input), + maybePeriodic: const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).decode(input), + priority: _i1.U8Codec.codec.decode(input), + call: _i5.RuntimeCall.codec.decode(input), + ); + } + + /// TaskName + final List id; + + /// BlockNumberFor + final int when; + + /// Option, T::Moment>> + final _i3.Tuple2<_i4.BlockNumberOrTimestamp, int>? maybePeriodic; + + /// schedule::Priority + final int priority; + + /// Box<::RuntimeCall> + final _i5.RuntimeCall call; + + @override + Map> toJson() => { + 'schedule_named': { + 'id': id.toList(), + 'when': when, + 'maybePeriodic': [maybePeriodic?.value0.toJson(), maybePeriodic?.value1], + 'priority': priority, + 'call': call.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.U8ArrayCodec(32).sizeHint(id); + size = size + _i1.U32Codec.codec.sizeHint(when); + size = + size + + const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).sizeHint(maybePeriodic); + size = size + _i1.U8Codec.codec.sizeHint(priority); + size = size + _i5.RuntimeCall.codec.sizeHint(call); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.U8ArrayCodec(32).encodeTo(id, output); + _i1.U32Codec.codec.encodeTo(when, output); + const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, 
_i1.U32Codec.codec), + ).encodeTo(maybePeriodic, output); + _i1.U8Codec.codec.encodeTo(priority, output); + _i5.RuntimeCall.codec.encodeTo(call, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ScheduleNamed && + _i6.listsEqual(other.id, id) && + other.when == when && + other.maybePeriodic == maybePeriodic && + other.priority == priority && + other.call == call; + + @override + int get hashCode => Object.hash(id, when, maybePeriodic, priority, call); +} + +/// Cancel a named scheduled task. +class CancelNamed extends Call { + const CancelNamed({required this.id}); + + factory CancelNamed._decode(_i1.Input input) { + return CancelNamed(id: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// TaskName + final List id; + + @override + Map>> toJson() => { + 'cancel_named': {'id': id.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.U8ArrayCodec(32).sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(id, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is CancelNamed && _i6.listsEqual(other.id, id); + + @override + int get hashCode => id.hashCode; +} + +/// Anonymously schedule a task after a delay. 
+class ScheduleAfter extends Call { + const ScheduleAfter({required this.after, this.maybePeriodic, required this.priority, required this.call}); + + factory ScheduleAfter._decode(_i1.Input input) { + return ScheduleAfter( + after: _i4.BlockNumberOrTimestamp.codec.decode(input), + maybePeriodic: const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).decode(input), + priority: _i1.U8Codec.codec.decode(input), + call: _i5.RuntimeCall.codec.decode(input), + ); + } + + /// BlockNumberOrTimestamp, T::Moment> + final _i4.BlockNumberOrTimestamp after; + + /// Option, T::Moment>> + final _i3.Tuple2<_i4.BlockNumberOrTimestamp, int>? maybePeriodic; + + /// schedule::Priority + final int priority; + + /// Box<::RuntimeCall> + final _i5.RuntimeCall call; + + @override + Map> toJson() => { + 'schedule_after': { + 'after': after.toJson(), + 'maybePeriodic': [maybePeriodic?.value0.toJson(), maybePeriodic?.value1], + 'priority': priority, + 'call': call.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + _i4.BlockNumberOrTimestamp.codec.sizeHint(after); + size = + size + + const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).sizeHint(maybePeriodic); + size = size + _i1.U8Codec.codec.sizeHint(priority); + size = size + _i5.RuntimeCall.codec.sizeHint(call); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i4.BlockNumberOrTimestamp.codec.encodeTo(after, output); + const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).encodeTo(maybePeriodic, output); + _i1.U8Codec.codec.encodeTo(priority, output); + _i5.RuntimeCall.codec.encodeTo(call, output); + 
} + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ScheduleAfter && + other.after == after && + other.maybePeriodic == maybePeriodic && + other.priority == priority && + other.call == call; + + @override + int get hashCode => Object.hash(after, maybePeriodic, priority, call); +} + +/// Schedule a named task after a delay. +class ScheduleNamedAfter extends Call { + const ScheduleNamedAfter({ + required this.id, + required this.after, + this.maybePeriodic, + required this.priority, + required this.call, + }); + + factory ScheduleNamedAfter._decode(_i1.Input input) { + return ScheduleNamedAfter( + id: const _i1.U8ArrayCodec(32).decode(input), + after: _i4.BlockNumberOrTimestamp.codec.decode(input), + maybePeriodic: const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).decode(input), + priority: _i1.U8Codec.codec.decode(input), + call: _i5.RuntimeCall.codec.decode(input), + ); + } + + /// TaskName + final List id; + + /// BlockNumberOrTimestamp, T::Moment> + final _i4.BlockNumberOrTimestamp after; + + /// Option, T::Moment>> + final _i3.Tuple2<_i4.BlockNumberOrTimestamp, int>? 
maybePeriodic; + + /// schedule::Priority + final int priority; + + /// Box<::RuntimeCall> + final _i5.RuntimeCall call; + + @override + Map> toJson() => { + 'schedule_named_after': { + 'id': id.toList(), + 'after': after.toJson(), + 'maybePeriodic': [maybePeriodic?.value0.toJson(), maybePeriodic?.value1], + 'priority': priority, + 'call': call.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.U8ArrayCodec(32).sizeHint(id); + size = size + _i4.BlockNumberOrTimestamp.codec.sizeHint(after); + size = + size + + const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).sizeHint(maybePeriodic); + size = size + _i1.U8Codec.codec.sizeHint(priority); + size = size + _i5.RuntimeCall.codec.sizeHint(call); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + const _i1.U8ArrayCodec(32).encodeTo(id, output); + _i4.BlockNumberOrTimestamp.codec.encodeTo(after, output); + const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).encodeTo(maybePeriodic, output); + _i1.U8Codec.codec.encodeTo(priority, output); + _i5.RuntimeCall.codec.encodeTo(call, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ScheduleNamedAfter && + _i6.listsEqual(other.id, id) && + other.after == after && + other.maybePeriodic == maybePeriodic && + other.priority == priority && + other.call == call; + + @override + int get hashCode => Object.hash(id, after, maybePeriodic, priority, call); +} + +/// Set a retry configuration for a task so that, in case its scheduled run fails, it will +/// be retried after `period` blocks, for a total amount of `retries` retries or until it +/// succeeds. 
+/// +/// Tasks which need to be scheduled for a retry are still subject to weight metering and +/// agenda space, same as a regular task. If a periodic task fails, it will be scheduled +/// normally while the task is retrying. +/// +/// Tasks scheduled as a result of a retry for a periodic task are unnamed, non-periodic +/// clones of the original task. Their retry configuration will be derived from the +/// original task's configuration, but will have a lower value for `remaining` than the +/// original `total_retries`. +class SetRetry extends Call { + const SetRetry({required this.task, required this.retries, required this.period}); + + factory SetRetry._decode(_i1.Input input) { + return SetRetry( + task: const _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>( + _i4.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).decode(input), + retries: _i1.U8Codec.codec.decode(input), + period: _i4.BlockNumberOrTimestamp.codec.decode(input), + ); + } + + /// TaskAddressOf + final _i3.Tuple2<_i4.BlockNumberOrTimestamp, int> task; + + /// u8 + final int retries; + + /// BlockNumberOrTimestampOf + final _i4.BlockNumberOrTimestamp period; + + @override + Map> toJson() => { + 'set_retry': { + 'task': [task.value0.toJson(), task.value1], + 'retries': retries, + 'period': period.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>( + _i4.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).sizeHint(task); + size = size + _i1.U8Codec.codec.sizeHint(retries); + size = size + _i4.BlockNumberOrTimestamp.codec.sizeHint(period); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + const _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>( + _i4.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).encodeTo(task, output); + _i1.U8Codec.codec.encodeTo(retries, output); + _i4.BlockNumberOrTimestamp.codec.encodeTo(period, output); + } + + @override + bool 
operator ==(Object other) => + identical(this, other) || + other is SetRetry && other.task == task && other.retries == retries && other.period == period; + + @override + int get hashCode => Object.hash(task, retries, period); +} + +/// Set a retry configuration for a named task so that, in case its scheduled run fails, it +/// will be retried after `period` blocks, for a total amount of `retries` retries or until +/// it succeeds. +/// +/// Tasks which need to be scheduled for a retry are still subject to weight metering and +/// agenda space, same as a regular task. If a periodic task fails, it will be scheduled +/// normally while the task is retrying. +/// +/// Tasks scheduled as a result of a retry for a periodic task are unnamed, non-periodic +/// clones of the original task. Their retry configuration will be derived from the +/// original task's configuration, but will have a lower value for `remaining` than the +/// original `total_retries`. +class SetRetryNamed extends Call { + const SetRetryNamed({required this.id, required this.retries, required this.period}); + + factory SetRetryNamed._decode(_i1.Input input) { + return SetRetryNamed( + id: const _i1.U8ArrayCodec(32).decode(input), + retries: _i1.U8Codec.codec.decode(input), + period: _i4.BlockNumberOrTimestamp.codec.decode(input), + ); + } + + /// TaskName + final List id; + + /// u8 + final int retries; + + /// BlockNumberOrTimestampOf + final _i4.BlockNumberOrTimestamp period; + + @override + Map> toJson() => { + 'set_retry_named': {'id': id.toList(), 'retries': retries, 'period': period.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.U8ArrayCodec(32).sizeHint(id); + size = size + _i1.U8Codec.codec.sizeHint(retries); + size = size + _i4.BlockNumberOrTimestamp.codec.sizeHint(period); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + const _i1.U8ArrayCodec(32).encodeTo(id, output); + _i1.U8Codec.codec.encodeTo(retries, 
output); + _i4.BlockNumberOrTimestamp.codec.encodeTo(period, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is SetRetryNamed && _i6.listsEqual(other.id, id) && other.retries == retries && other.period == period; + + @override + int get hashCode => Object.hash(id, retries, period); +} + +/// Removes the retry configuration of a task. +class CancelRetry extends Call { + const CancelRetry({required this.task}); + + factory CancelRetry._decode(_i1.Input input) { + return CancelRetry( + task: const _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>( + _i4.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).decode(input), + ); + } + + /// TaskAddressOf + final _i3.Tuple2<_i4.BlockNumberOrTimestamp, int> task; + + @override + Map>> toJson() => { + 'cancel_retry': { + 'task': [task.value0.toJson(), task.value1], + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>( + _i4.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).sizeHint(task); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + const _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>( + _i4.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).encodeTo(task, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is CancelRetry && other.task == task; + + @override + int get hashCode => task.hashCode; +} + +/// Cancel the retry configuration of a named task. 
+class CancelRetryNamed extends Call { + const CancelRetryNamed({required this.id}); + + factory CancelRetryNamed._decode(_i1.Input input) { + return CancelRetryNamed(id: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// TaskName + final List id; + + @override + Map>> toJson() => { + 'cancel_retry_named': {'id': id.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.U8ArrayCodec(32).sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + const _i1.U8ArrayCodec(32).encodeTo(id, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is CancelRetryNamed && _i6.listsEqual(other.id, id); + + @override + int get hashCode => id.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_scheduler/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_scheduler/pallet/error.dart new file mode 100644 index 00000000..1d13b0d3 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_scheduler/pallet/error.dart @@ -0,0 +1,73 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + /// Failed to schedule a call + failedToSchedule('FailedToSchedule', 0), + + /// Cannot find the scheduled call. + notFound('NotFound', 1), + + /// Given target block number is in the past. + targetBlockNumberInPast('TargetBlockNumberInPast', 2), + + /// Given target timestamp is in the past. + targetTimestampInPast('TargetTimestampInPast', 3), + + /// Reschedule failed because it does not change scheduled time. + rescheduleNoChange('RescheduleNoChange', 4), + + /// Attempt to use a non-named function on a named task. 
+ named('Named', 5); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.failedToSchedule; + case 1: + return Error.notFound; + case 2: + return Error.targetBlockNumberInPast; + case 3: + return Error.targetTimestampInPast; + case 4: + return Error.rescheduleNoChange; + case 5: + return Error.named; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_scheduler/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_scheduler/pallet/event.dart new file mode 100644 index 00000000..09186880 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_scheduler/pallet/event.dart @@ -0,0 +1,690 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../qp_scheduler/block_number_or_timestamp.dart' as _i3; +import '../../sp_runtime/dispatch_error.dart' as _i5; +import '../../tuples_1.dart' as _i4; + +/// Events type. 
+abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + Scheduled scheduled({required _i3.BlockNumberOrTimestamp when, required int index}) { + return Scheduled(when: when, index: index); + } + + Canceled canceled({required _i3.BlockNumberOrTimestamp when, required int index}) { + return Canceled(when: when, index: index); + } + + Dispatched dispatched({ + required _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task, + List? id, + required _i1.Result result, + }) { + return Dispatched(task: task, id: id, result: result); + } + + RetrySet retrySet({ + required _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task, + List? id, + required _i3.BlockNumberOrTimestamp period, + required int retries, + }) { + return RetrySet(task: task, id: id, period: period, retries: retries); + } + + RetryCancelled retryCancelled({required _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task, List? id}) { + return RetryCancelled(task: task, id: id); + } + + CallUnavailable callUnavailable({required _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task, List? id}) { + return CallUnavailable(task: task, id: id); + } + + PeriodicFailed periodicFailed({required _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task, List? id}) { + return PeriodicFailed(task: task, id: id); + } + + RetryFailed retryFailed({required _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task, List? id}) { + return RetryFailed(task: task, id: id); + } + + PermanentlyOverweight permanentlyOverweight({ + required _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task, + List? 
id, + }) { + return PermanentlyOverweight(task: task, id: id); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Scheduled._decode(input); + case 1: + return Canceled._decode(input); + case 2: + return Dispatched._decode(input); + case 3: + return RetrySet._decode(input); + case 4: + return RetryCancelled._decode(input); + case 5: + return CallUnavailable._decode(input); + case 6: + return PeriodicFailed._decode(input); + case 7: + return RetryFailed._decode(input); + case 8: + return PermanentlyOverweight._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case Scheduled: + (value as Scheduled).encodeTo(output); + break; + case Canceled: + (value as Canceled).encodeTo(output); + break; + case Dispatched: + (value as Dispatched).encodeTo(output); + break; + case RetrySet: + (value as RetrySet).encodeTo(output); + break; + case RetryCancelled: + (value as RetryCancelled).encodeTo(output); + break; + case CallUnavailable: + (value as CallUnavailable).encodeTo(output); + break; + case PeriodicFailed: + (value as PeriodicFailed).encodeTo(output); + break; + case RetryFailed: + (value as RetryFailed).encodeTo(output); + break; + case PermanentlyOverweight: + (value as PermanentlyOverweight).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case Scheduled: + return (value as Scheduled)._sizeHint(); + case Canceled: + return (value as Canceled)._sizeHint(); + case Dispatched: + return (value as Dispatched)._sizeHint(); + case RetrySet: + return (value as RetrySet)._sizeHint(); + case RetryCancelled: + return (value as 
RetryCancelled)._sizeHint(); + case CallUnavailable: + return (value as CallUnavailable)._sizeHint(); + case PeriodicFailed: + return (value as PeriodicFailed)._sizeHint(); + case RetryFailed: + return (value as RetryFailed)._sizeHint(); + case PermanentlyOverweight: + return (value as PermanentlyOverweight)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Scheduled some task. +class Scheduled extends Event { + const Scheduled({required this.when, required this.index}); + + factory Scheduled._decode(_i1.Input input) { + return Scheduled(when: _i3.BlockNumberOrTimestamp.codec.decode(input), index: _i1.U32Codec.codec.decode(input)); + } + + /// BlockNumberOrTimestampOf + final _i3.BlockNumberOrTimestamp when; + + /// u32 + final int index; + + @override + Map> toJson() => { + 'Scheduled': {'when': when.toJson(), 'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.BlockNumberOrTimestamp.codec.sizeHint(when); + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.BlockNumberOrTimestamp.codec.encodeTo(when, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Scheduled && other.when == when && other.index == index; + + @override + int get hashCode => Object.hash(when, index); +} + +/// Canceled some task. 
+class Canceled extends Event { + const Canceled({required this.when, required this.index}); + + factory Canceled._decode(_i1.Input input) { + return Canceled(when: _i3.BlockNumberOrTimestamp.codec.decode(input), index: _i1.U32Codec.codec.decode(input)); + } + + /// BlockNumberOrTimestampOf + final _i3.BlockNumberOrTimestamp when; + + /// u32 + final int index; + + @override + Map> toJson() => { + 'Canceled': {'when': when.toJson(), 'index': index}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.BlockNumberOrTimestamp.codec.sizeHint(when); + size = size + _i1.U32Codec.codec.sizeHint(index); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i3.BlockNumberOrTimestamp.codec.encodeTo(when, output); + _i1.U32Codec.codec.encodeTo(index, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Canceled && other.when == when && other.index == index; + + @override + int get hashCode => Object.hash(when, index); +} + +/// Dispatched some task. +class Dispatched extends Event { + const Dispatched({required this.task, this.id, required this.result}); + + factory Dispatched._decode(_i1.Input input) { + return Dispatched( + task: const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).decode(input), + id: const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).decode(input), + result: const _i1.ResultCodec( + _i1.NullCodec.codec, + _i5.DispatchError.codec, + ).decode(input), + ); + } + + /// TaskAddressOf + final _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task; + + /// Option + final List? 
id; + + /// DispatchResult + final _i1.Result result; + + @override + Map> toJson() => { + 'Dispatched': { + 'task': [task.value0.toJson(), task.value1], + 'id': id?.toList(), + 'result': result.toJson(), + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).sizeHint(task); + size = size + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).sizeHint(id); + size = + size + + const _i1.ResultCodec( + _i1.NullCodec.codec, + _i5.DispatchError.codec, + ).sizeHint(result); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).encodeTo(task, output); + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).encodeTo(id, output); + const _i1.ResultCodec( + _i1.NullCodec.codec, + _i5.DispatchError.codec, + ).encodeTo(result, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Dispatched && other.task == task && other.id == id && other.result == result; + + @override + int get hashCode => Object.hash(task, id, result); +} + +/// Set a retry configuration for some task. +class RetrySet extends Event { + const RetrySet({required this.task, this.id, required this.period, required this.retries}); + + factory RetrySet._decode(_i1.Input input) { + return RetrySet( + task: const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).decode(input), + id: const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).decode(input), + period: _i3.BlockNumberOrTimestamp.codec.decode(input), + retries: _i1.U8Codec.codec.decode(input), + ); + } + + /// TaskAddressOf + final _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task; + + /// Option + final List? 
id; + + /// BlockNumberOrTimestampOf + final _i3.BlockNumberOrTimestamp period; + + /// u8 + final int retries; + + @override + Map> toJson() => { + 'RetrySet': { + 'task': [task.value0.toJson(), task.value1], + 'id': id?.toList(), + 'period': period.toJson(), + 'retries': retries, + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).sizeHint(task); + size = size + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).sizeHint(id); + size = size + _i3.BlockNumberOrTimestamp.codec.sizeHint(period); + size = size + _i1.U8Codec.codec.sizeHint(retries); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).encodeTo(task, output); + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).encodeTo(id, output); + _i3.BlockNumberOrTimestamp.codec.encodeTo(period, output); + _i1.U8Codec.codec.encodeTo(retries, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is RetrySet && other.task == task && other.id == id && other.period == period && other.retries == retries; + + @override + int get hashCode => Object.hash(task, id, period, retries); +} + +/// Cancel a retry configuration for some task. +class RetryCancelled extends Event { + const RetryCancelled({required this.task, this.id}); + + factory RetryCancelled._decode(_i1.Input input) { + return RetryCancelled( + task: const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).decode(input), + id: const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).decode(input), + ); + } + + /// TaskAddressOf + final _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task; + + /// Option + final List? 
id; + + @override + Map?>> toJson() => { + 'RetryCancelled': { + 'task': [task.value0.toJson(), task.value1], + 'id': id?.toList(), + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).sizeHint(task); + size = size + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).encodeTo(task, output); + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).encodeTo(id, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is RetryCancelled && other.task == task && other.id == id; + + @override + int get hashCode => Object.hash(task, id); +} + +/// The call for the provided hash was not found so the task has been aborted. +class CallUnavailable extends Event { + const CallUnavailable({required this.task, this.id}); + + factory CallUnavailable._decode(_i1.Input input) { + return CallUnavailable( + task: const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).decode(input), + id: const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).decode(input), + ); + } + + /// TaskAddressOf + final _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task; + + /// Option + final List? 
id; + + @override + Map?>> toJson() => { + 'CallUnavailable': { + 'task': [task.value0.toJson(), task.value1], + 'id': id?.toList(), + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).sizeHint(task); + size = size + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).encodeTo(task, output); + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).encodeTo(id, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is CallUnavailable && other.task == task && other.id == id; + + @override + int get hashCode => Object.hash(task, id); +} + +/// The given task was unable to be renewed since the agenda is full at that block. +class PeriodicFailed extends Event { + const PeriodicFailed({required this.task, this.id}); + + factory PeriodicFailed._decode(_i1.Input input) { + return PeriodicFailed( + task: const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).decode(input), + id: const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).decode(input), + ); + } + + /// TaskAddressOf + final _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task; + + /// Option + final List? 
id; + + @override + Map?>> toJson() => { + 'PeriodicFailed': { + 'task': [task.value0.toJson(), task.value1], + 'id': id?.toList(), + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).sizeHint(task); + size = size + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).encodeTo(task, output); + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).encodeTo(id, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is PeriodicFailed && other.task == task && other.id == id; + + @override + int get hashCode => Object.hash(task, id); +} + +/// The given task was unable to be retried since the agenda is full at that block or there +/// was not enough weight to reschedule it. +class RetryFailed extends Event { + const RetryFailed({required this.task, this.id}); + + factory RetryFailed._decode(_i1.Input input) { + return RetryFailed( + task: const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).decode(input), + id: const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).decode(input), + ); + } + + /// TaskAddressOf + final _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task; + + /// Option + final List? 
id; + + @override + Map?>> toJson() => { + 'RetryFailed': { + 'task': [task.value0.toJson(), task.value1], + 'id': id?.toList(), + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).sizeHint(task); + size = size + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).encodeTo(task, output); + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).encodeTo(id, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is RetryFailed && other.task == task && other.id == id; + + @override + int get hashCode => Object.hash(task, id); +} + +/// The given task can never be executed since it is overweight. +class PermanentlyOverweight extends Event { + const PermanentlyOverweight({required this.task, this.id}); + + factory PermanentlyOverweight._decode(_i1.Input input) { + return PermanentlyOverweight( + task: const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).decode(input), + id: const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).decode(input), + ); + } + + /// TaskAddressOf + final _i4.Tuple2<_i3.BlockNumberOrTimestamp, int> task; + + /// Option + final List? 
id; + + @override + Map?>> toJson() => { + 'PermanentlyOverweight': { + 'task': [task.value0.toJson(), task.value1], + 'id': id?.toList(), + }, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).sizeHint(task); + size = size + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).sizeHint(id); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + const _i4.Tuple2Codec<_i3.BlockNumberOrTimestamp, int>( + _i3.BlockNumberOrTimestamp.codec, + _i1.U32Codec.codec, + ).encodeTo(task, output); + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).encodeTo(id, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is PermanentlyOverweight && other.task == task && other.id == id; + + @override + int get hashCode => Object.hash(task, id); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_scheduler/retry_config.dart b/quantus_sdk/lib/generated/planck/types/pallet_scheduler/retry_config.dart new file mode 100644 index 00000000..57203819 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_scheduler/retry_config.dart @@ -0,0 +1,71 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../qp_scheduler/block_number_or_timestamp.dart' as _i2; + +class RetryConfig { + const RetryConfig({required this.totalRetries, required this.remaining, required this.period}); + + factory RetryConfig.decode(_i1.Input input) { + return codec.decode(input); + } + + /// u8 + final int totalRetries; + + /// u8 + final int remaining; + + /// Period + final _i2.BlockNumberOrTimestamp period; + + static const $RetryConfigCodec codec = $RetryConfigCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'totalRetries': totalRetries, 
'remaining': remaining, 'period': period.toJson()}; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is RetryConfig && + other.totalRetries == totalRetries && + other.remaining == remaining && + other.period == period; + + @override + int get hashCode => Object.hash(totalRetries, remaining, period); +} + +class $RetryConfigCodec with _i1.Codec { + const $RetryConfigCodec(); + + @override + void encodeTo(RetryConfig obj, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(obj.totalRetries, output); + _i1.U8Codec.codec.encodeTo(obj.remaining, output); + _i2.BlockNumberOrTimestamp.codec.encodeTo(obj.period, output); + } + + @override + RetryConfig decode(_i1.Input input) { + return RetryConfig( + totalRetries: _i1.U8Codec.codec.decode(input), + remaining: _i1.U8Codec.codec.decode(input), + period: _i2.BlockNumberOrTimestamp.codec.decode(input), + ); + } + + @override + int sizeHint(RetryConfig obj) { + int size = 0; + size = size + _i1.U8Codec.codec.sizeHint(obj.totalRetries); + size = size + _i1.U8Codec.codec.sizeHint(obj.remaining); + size = size + _i2.BlockNumberOrTimestamp.codec.sizeHint(obj.period); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_scheduler/scheduled.dart b/quantus_sdk/lib/generated/planck/types/pallet_scheduler/scheduled.dart new file mode 100644 index 00000000..0fec4c35 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_scheduler/scheduled.dart @@ -0,0 +1,102 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i6; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../frame_support/traits/preimages/bounded.dart' as _i2; +import '../qp_scheduler/block_number_or_timestamp.dart' as _i4; +import '../quantus_runtime/origin_caller.dart' as _i5; +import '../tuples_1.dart' as _i3; + +class Scheduled { + const Scheduled({this.maybeId, required this.priority, required this.call, this.maybePeriodic, required 
this.origin}); + + factory Scheduled.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Option + final List? maybeId; + + /// schedule::Priority + final int priority; + + /// Call + final _i2.Bounded call; + + /// Option> + final _i3.Tuple2<_i4.BlockNumberOrTimestamp, int>? maybePeriodic; + + /// PalletsOrigin + final _i5.OriginCaller origin; + + static const $ScheduledCodec codec = $ScheduledCodec(); + + _i6.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'maybeId': maybeId?.toList(), + 'priority': priority, + 'call': call.toJson(), + 'maybePeriodic': [maybePeriodic?.value0.toJson(), maybePeriodic?.value1], + 'origin': origin.toJson(), + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Scheduled && + other.maybeId == maybeId && + other.priority == priority && + other.call == call && + other.maybePeriodic == maybePeriodic && + other.origin == origin; + + @override + int get hashCode => Object.hash(maybeId, priority, call, maybePeriodic, origin); +} + +class $ScheduledCodec with _i1.Codec { + const $ScheduledCodec(); + + @override + void encodeTo(Scheduled obj, _i1.Output output) { + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).encodeTo(obj.maybeId, output); + _i1.U8Codec.codec.encodeTo(obj.priority, output); + _i2.Bounded.codec.encodeTo(obj.call, output); + const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).encodeTo(obj.maybePeriodic, output); + _i5.OriginCaller.codec.encodeTo(obj.origin, output); + } + + @override + Scheduled decode(_i1.Input input) { + return Scheduled( + maybeId: const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).decode(input), + priority: _i1.U8Codec.codec.decode(input), + call: _i2.Bounded.codec.decode(input), + maybePeriodic: const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + 
_i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).decode(input), + origin: _i5.OriginCaller.codec.decode(input), + ); + } + + @override + int sizeHint(Scheduled obj) { + int size = 0; + size = size + const _i1.OptionCodec>(_i1.U8ArrayCodec(32)).sizeHint(obj.maybeId); + size = size + _i1.U8Codec.codec.sizeHint(obj.priority); + size = size + _i2.Bounded.codec.sizeHint(obj.call); + size = + size + + const _i1.OptionCodec<_i3.Tuple2<_i4.BlockNumberOrTimestamp, int>>( + _i3.Tuple2Codec<_i4.BlockNumberOrTimestamp, int>(_i4.BlockNumberOrTimestamp.codec, _i1.U32Codec.codec), + ).sizeHint(obj.maybePeriodic); + size = size + _i5.OriginCaller.codec.sizeHint(obj.origin); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_sudo/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_sudo/pallet/call.dart new file mode 100644 index 00000000..9513e6c8 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_sudo/pallet/call.dart @@ -0,0 +1,296 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../quantus_runtime/runtime_call.dart' as _i3; +import '../../sp_runtime/multiaddress/multi_address.dart' as _i5; +import '../../sp_weights/weight_v2/weight.dart' as _i4; + +/// Contains a variant per dispatchable extrinsic that this pallet has. 
+abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $Call { + const $Call(); + + Sudo sudo({required _i3.RuntimeCall call}) { + return Sudo(call: call); + } + + SudoUncheckedWeight sudoUncheckedWeight({required _i3.RuntimeCall call, required _i4.Weight weight}) { + return SudoUncheckedWeight(call: call, weight: weight); + } + + SetKey setKey({required _i5.MultiAddress new_}) { + return SetKey(new_: new_); + } + + SudoAs sudoAs({required _i5.MultiAddress who, required _i3.RuntimeCall call}) { + return SudoAs(who: who, call: call); + } + + RemoveKey removeKey() { + return RemoveKey(); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Sudo._decode(input); + case 1: + return SudoUncheckedWeight._decode(input); + case 2: + return SetKey._decode(input); + case 3: + return SudoAs._decode(input); + case 4: + return const RemoveKey(); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case Sudo: + (value as Sudo).encodeTo(output); + break; + case SudoUncheckedWeight: + (value as SudoUncheckedWeight).encodeTo(output); + break; + case SetKey: + (value as SetKey).encodeTo(output); + break; + case SudoAs: + (value as SudoAs).encodeTo(output); + break; + case RemoveKey: + (value as RemoveKey).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override 
+ int sizeHint(Call value) { + switch (value.runtimeType) { + case Sudo: + return (value as Sudo)._sizeHint(); + case SudoUncheckedWeight: + return (value as SudoUncheckedWeight)._sizeHint(); + case SetKey: + return (value as SetKey)._sizeHint(); + case SudoAs: + return (value as SudoAs)._sizeHint(); + case RemoveKey: + return 1; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Authenticates the sudo key and dispatches a function call with `Root` origin. +class Sudo extends Call { + const Sudo({required this.call}); + + factory Sudo._decode(_i1.Input input) { + return Sudo(call: _i3.RuntimeCall.codec.decode(input)); + } + + /// Box<::RuntimeCall> + final _i3.RuntimeCall call; + + @override + Map>>> toJson() => { + 'sudo': {'call': call.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.RuntimeCall.codec.sizeHint(call); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.RuntimeCall.codec.encodeTo(call, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Sudo && other.call == call; + + @override + int get hashCode => call.hashCode; +} + +/// Authenticates the sudo key and dispatches a function call with `Root` origin. +/// This function does not check the weight of the call, and instead allows the +/// Sudo user to specify the weight of the call. +/// +/// The dispatch origin for this call must be _Signed_. 
+class SudoUncheckedWeight extends Call { + const SudoUncheckedWeight({required this.call, required this.weight}); + + factory SudoUncheckedWeight._decode(_i1.Input input) { + return SudoUncheckedWeight(call: _i3.RuntimeCall.codec.decode(input), weight: _i4.Weight.codec.decode(input)); + } + + /// Box<::RuntimeCall> + final _i3.RuntimeCall call; + + /// Weight + final _i4.Weight weight; + + @override + Map>> toJson() => { + 'sudo_unchecked_weight': {'call': call.toJson(), 'weight': weight.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.RuntimeCall.codec.sizeHint(call); + size = size + _i4.Weight.codec.sizeHint(weight); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i3.RuntimeCall.codec.encodeTo(call, output); + _i4.Weight.codec.encodeTo(weight, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is SudoUncheckedWeight && other.call == call && other.weight == weight; + + @override + int get hashCode => Object.hash(call, weight); +} + +/// Authenticates the current sudo key and sets the given AccountId (`new`) as the new sudo +/// key. 
+class SetKey extends Call { + const SetKey({required this.new_}); + + factory SetKey._decode(_i1.Input input) { + return SetKey(new_: _i5.MultiAddress.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i5.MultiAddress new_; + + @override + Map>> toJson() => { + 'set_key': {'new': new_.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i5.MultiAddress.codec.sizeHint(new_); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i5.MultiAddress.codec.encodeTo(new_, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is SetKey && other.new_ == new_; + + @override + int get hashCode => new_.hashCode; +} + +/// Authenticates the sudo key and dispatches a function call with `Signed` origin from +/// a given account. +/// +/// The dispatch origin for this call must be _Signed_. +class SudoAs extends Call { + const SudoAs({required this.who, required this.call}); + + factory SudoAs._decode(_i1.Input input) { + return SudoAs(who: _i5.MultiAddress.codec.decode(input), call: _i3.RuntimeCall.codec.decode(input)); + } + + /// AccountIdLookupOf + final _i5.MultiAddress who; + + /// Box<::RuntimeCall> + final _i3.RuntimeCall call; + + @override + Map>> toJson() => { + 'sudo_as': {'who': who.toJson(), 'call': call.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i5.MultiAddress.codec.sizeHint(who); + size = size + _i3.RuntimeCall.codec.sizeHint(call); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i5.MultiAddress.codec.encodeTo(who, output); + _i3.RuntimeCall.codec.encodeTo(call, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is SudoAs && other.who == who && other.call == call; + + @override + int get hashCode => Object.hash(who, call); +} + +/// Permanently removes the sudo key. 
+/// +/// **This cannot be un-done.** +class RemoveKey extends Call { + const RemoveKey(); + + @override + Map toJson() => {'remove_key': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + } + + @override + bool operator ==(Object other) => other is RemoveKey; + + @override + int get hashCode => runtimeType.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_sudo/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_sudo/pallet/error.dart new file mode 100644 index 00000000..414a04dd --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_sudo/pallet/error.dart @@ -0,0 +1,48 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// Error for the Sudo pallet. +enum Error { + /// Sender must be the Sudo account. + requireSudo('RequireSudo', 0); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.requireSudo; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_sudo/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_sudo/pallet/event.dart new file mode 100644 index 00000000..7c57f805 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_sudo/pallet/event.dart @@ -0,0 
+1,269 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i5; + +import '../../sp_core/crypto/account_id32.dart' as _i4; +import '../../sp_runtime/dispatch_error.dart' as _i3; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $Event { + const $Event(); + + Sudid sudid({required _i1.Result sudoResult}) { + return Sudid(sudoResult: sudoResult); + } + + KeyChanged keyChanged({_i4.AccountId32? old, required _i4.AccountId32 new_}) { + return KeyChanged(old: old, new_: new_); + } + + KeyRemoved keyRemoved() { + return KeyRemoved(); + } + + SudoAsDone sudoAsDone({required _i1.Result sudoResult}) { + return SudoAsDone(sudoResult: sudoResult); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Sudid._decode(input); + case 1: + return KeyChanged._decode(input); + case 2: + return const KeyRemoved(); + case 3: + return SudoAsDone._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case Sudid: + (value as Sudid).encodeTo(output); + break; + case KeyChanged: + (value as KeyChanged).encodeTo(output); + break; + case KeyRemoved: + (value as KeyRemoved).encodeTo(output); + break; + case SudoAsDone: + (value as 
SudoAsDone).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case Sudid: + return (value as Sudid)._sizeHint(); + case KeyChanged: + return (value as KeyChanged)._sizeHint(); + case KeyRemoved: + return 1; + case SudoAsDone: + return (value as SudoAsDone)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// A sudo call just took place. +class Sudid extends Event { + const Sudid({required this.sudoResult}); + + factory Sudid._decode(_i1.Input input) { + return Sudid( + sudoResult: const _i1.ResultCodec( + _i1.NullCodec.codec, + _i3.DispatchError.codec, + ).decode(input), + ); + } + + /// DispatchResult + /// The result of the call made by the sudo user. + final _i1.Result sudoResult; + + @override + Map>> toJson() => { + 'Sudid': {'sudoResult': sudoResult.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i1.ResultCodec( + _i1.NullCodec.codec, + _i3.DispatchError.codec, + ).sizeHint(sudoResult); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.ResultCodec( + _i1.NullCodec.codec, + _i3.DispatchError.codec, + ).encodeTo(sudoResult, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Sudid && other.sudoResult == sudoResult; + + @override + int get hashCode => sudoResult.hashCode; +} + +/// The sudo key has been updated. +class KeyChanged extends Event { + const KeyChanged({this.old, required this.new_}); + + factory KeyChanged._decode(_i1.Input input) { + return KeyChanged( + old: const _i1.OptionCodec<_i4.AccountId32>(_i4.AccountId32Codec()).decode(input), + new_: const _i1.U8ArrayCodec(32).decode(input), + ); + } + + /// Option + /// The old sudo key (if one was previously set). + final _i4.AccountId32? 
old; + + /// T::AccountId + /// The new sudo key (if one was set). + final _i4.AccountId32 new_; + + @override + Map?>> toJson() => { + 'KeyChanged': {'old': old?.toList(), 'new': new_.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.OptionCodec<_i4.AccountId32>(_i4.AccountId32Codec()).sizeHint(old); + size = size + const _i4.AccountId32Codec().sizeHint(new_); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + const _i1.OptionCodec<_i4.AccountId32>(_i4.AccountId32Codec()).encodeTo(old, output); + const _i1.U8ArrayCodec(32).encodeTo(new_, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is KeyChanged && other.old == old && _i5.listsEqual(other.new_, new_); + + @override + int get hashCode => Object.hash(old, new_); +} + +/// The key was permanently removed. +class KeyRemoved extends Event { + const KeyRemoved(); + + @override + Map toJson() => {'KeyRemoved': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + } + + @override + bool operator ==(Object other) => other is KeyRemoved; + + @override + int get hashCode => runtimeType.hashCode; +} + +/// A [sudo_as](Pallet::sudo_as) call just took place. +class SudoAsDone extends Event { + const SudoAsDone({required this.sudoResult}); + + factory SudoAsDone._decode(_i1.Input input) { + return SudoAsDone( + sudoResult: const _i1.ResultCodec( + _i1.NullCodec.codec, + _i3.DispatchError.codec, + ).decode(input), + ); + } + + /// DispatchResult + /// The result of the call made by the sudo user. 
+ final _i1.Result sudoResult; + + @override + Map>> toJson() => { + 'SudoAsDone': {'sudoResult': sudoResult.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i1.ResultCodec( + _i1.NullCodec.codec, + _i3.DispatchError.codec, + ).sizeHint(sudoResult); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.ResultCodec( + _i1.NullCodec.codec, + _i3.DispatchError.codec, + ).encodeTo(sudoResult, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is SudoAsDone && other.sudoResult == sudoResult; + + @override + int get hashCode => sudoResult.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_timestamp/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_timestamp/pallet/call.dart new file mode 100644 index 00000000..513d9777 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_timestamp/pallet/call.dart @@ -0,0 +1,125 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// Contains a variant per dispatchable extrinsic that this pallet has. 
+abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + Set set({required BigInt now}) { + return Set(now: now); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Set._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case Set: + (value as Set).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case Set: + return (value as Set)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Set the current time. +/// +/// This call should be invoked exactly once per block. It will panic at the finalization +/// phase, if this call hasn't been invoked by that time. +/// +/// The timestamp should be greater than the previous one by the amount specified by +/// [`Config::MinimumPeriod`]. +/// +/// The dispatch origin for this call must be _None_. +/// +/// This dispatch class is _Mandatory_ to ensure it gets executed in the block. Be aware +/// that changing the complexity of this call could result exhausting the resources in a +/// block to execute any other calls. 
+/// +/// ## Complexity +/// - `O(1)` (Note that implementations of `OnTimestampSet` must also be `O(1)`) +/// - 1 storage read and 1 storage mutation (codec `O(1)` because of `DidUpdate::take` in +/// `on_finalize`) +/// - 1 event handler `on_timestamp_set`. Must be `O(1)`. +class Set extends Call { + const Set({required this.now}); + + factory Set._decode(_i1.Input input) { + return Set(now: _i1.CompactBigIntCodec.codec.decode(input)); + } + + /// T::Moment + final BigInt now; + + @override + Map> toJson() => { + 'set': {'now': now}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(now); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.CompactBigIntCodec.codec.encodeTo(now, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Set && other.now == now; + + @override + int get hashCode => now.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_transaction_payment/charge_transaction_payment.dart b/quantus_sdk/lib/generated/planck/types/pallet_transaction_payment/charge_transaction_payment.dart new file mode 100644 index 00000000..7146a0fb --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_transaction_payment/charge_transaction_payment.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef ChargeTransactionPayment = BigInt; + +class ChargeTransactionPaymentCodec with _i1.Codec { + const ChargeTransactionPaymentCodec(); + + @override + ChargeTransactionPayment decode(_i1.Input input) { + return _i1.CompactBigIntCodec.codec.decode(input); + } + + @override + void encodeTo(ChargeTransactionPayment value, _i1.Output output) { + _i1.CompactBigIntCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(ChargeTransactionPayment value) { + return 
_i1.CompactBigIntCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_transaction_payment/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_transaction_payment/pallet/event.dart new file mode 100644 index 00000000..a0f28f9a --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_transaction_payment/pallet/event.dart @@ -0,0 +1,131 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../sp_core/crypto/account_id32.dart' as _i3; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + TransactionFeePaid transactionFeePaid({ + required _i3.AccountId32 who, + required BigInt actualFee, + required BigInt tip, + }) { + return TransactionFeePaid(who: who, actualFee: actualFee, tip: tip); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return TransactionFeePaid._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case TransactionFeePaid: + (value as TransactionFeePaid).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override 
+ int sizeHint(Event value) { + switch (value.runtimeType) { + case TransactionFeePaid: + return (value as TransactionFeePaid)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// A transaction fee `actual_fee`, of which `tip` was added to the minimum inclusion fee, +/// has been paid by `who`. +class TransactionFeePaid extends Event { + const TransactionFeePaid({required this.who, required this.actualFee, required this.tip}); + + factory TransactionFeePaid._decode(_i1.Input input) { + return TransactionFeePaid( + who: const _i1.U8ArrayCodec(32).decode(input), + actualFee: _i1.U128Codec.codec.decode(input), + tip: _i1.U128Codec.codec.decode(input), + ); + } + + /// T::AccountId + final _i3.AccountId32 who; + + /// BalanceOf + final BigInt actualFee; + + /// BalanceOf + final BigInt tip; + + @override + Map> toJson() => { + 'TransactionFeePaid': {'who': who.toList(), 'actualFee': actualFee, 'tip': tip}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(who); + size = size + _i1.U128Codec.codec.sizeHint(actualFee); + size = size + _i1.U128Codec.codec.sizeHint(tip); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(who, output); + _i1.U128Codec.codec.encodeTo(actualFee, output); + _i1.U128Codec.codec.encodeTo(tip, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is TransactionFeePaid && _i4.listsEqual(other.who, who) && other.actualFee == actualFee && other.tip == tip; + + @override + int get hashCode => Object.hash(who, actualFee, tip); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_transaction_payment/releases.dart b/quantus_sdk/lib/generated/planck/types/pallet_transaction_payment/releases.dart new file mode 100644 index 00000000..8669e3ca --- /dev/null +++ 
b/quantus_sdk/lib/generated/planck/types/pallet_transaction_payment/releases.dart @@ -0,0 +1,49 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum Releases { + v1Ancient('V1Ancient', 0), + v2('V2', 1); + + const Releases(this.variantName, this.codecIndex); + + factory Releases.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ReleasesCodec codec = $ReleasesCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ReleasesCodec with _i1.Codec { + const $ReleasesCodec(); + + @override + Releases decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Releases.v1Ancient; + case 1: + return Releases.v2; + default: + throw Exception('Releases: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Releases value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_treasury/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_treasury/pallet/call.dart new file mode 100644 index 00000000..569ee543 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_treasury/pallet/call.dart @@ -0,0 +1,156 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../sp_core/crypto/account_id32.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. 
+abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + SetTreasuryAccount setTreasuryAccount({required _i3.AccountId32 account}) { + return SetTreasuryAccount(account: account); + } + + SetTreasuryPortion setTreasuryPortion({required int portion}) { + return SetTreasuryPortion(portion: portion); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return SetTreasuryAccount._decode(input); + case 1: + return SetTreasuryPortion._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case SetTreasuryAccount: + (value as SetTreasuryAccount).encodeTo(output); + break; + case SetTreasuryPortion: + (value as SetTreasuryPortion).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case SetTreasuryAccount: + return (value as SetTreasuryAccount)._sizeHint(); + case SetTreasuryPortion: + return (value as SetTreasuryPortion)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Set the treasury account. Root only. 
+class SetTreasuryAccount extends Call { + const SetTreasuryAccount({required this.account}); + + factory SetTreasuryAccount._decode(_i1.Input input) { + return SetTreasuryAccount(account: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 account; + + @override + Map>> toJson() => { + 'set_treasury_account': {'account': account.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(account); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(account, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is SetTreasuryAccount && _i4.listsEqual(other.account, account); + + @override + int get hashCode => account.hashCode; +} + +/// Set the treasury portion (0-100). Root only. +class SetTreasuryPortion extends Call { + const SetTreasuryPortion({required this.portion}); + + factory SetTreasuryPortion._decode(_i1.Input input) { + return SetTreasuryPortion(portion: _i1.U8Codec.codec.decode(input)); + } + + /// u8 + final int portion; + + @override + Map> toJson() => { + 'set_treasury_portion': {'portion': portion}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(portion); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U8Codec.codec.encodeTo(portion, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is SetTreasuryPortion && other.portion == portion; + + @override + int get hashCode => portion.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_treasury/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_treasury/pallet/error.dart new file mode 100644 index 00000000..11153025 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_treasury/pallet/error.dart @@ -0,0 +1,47 
@@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + invalidPortion('InvalidPortion', 0); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.invalidPortion; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_treasury/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_treasury/pallet/event.dart new file mode 100644 index 00000000..3ac6acbe --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_treasury/pallet/event.dart @@ -0,0 +1,155 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../sp_core/crypto/account_id32.dart' as _i3; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + 
int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + TreasuryAccountUpdated treasuryAccountUpdated({required _i3.AccountId32 newAccount}) { + return TreasuryAccountUpdated(newAccount: newAccount); + } + + TreasuryPortionUpdated treasuryPortionUpdated({required int newPortion}) { + return TreasuryPortionUpdated(newPortion: newPortion); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return TreasuryAccountUpdated._decode(input); + case 1: + return TreasuryPortionUpdated._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case TreasuryAccountUpdated: + (value as TreasuryAccountUpdated).encodeTo(output); + break; + case TreasuryPortionUpdated: + (value as TreasuryPortionUpdated).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case TreasuryAccountUpdated: + return (value as TreasuryAccountUpdated)._sizeHint(); + case TreasuryPortionUpdated: + return (value as TreasuryPortionUpdated)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class TreasuryAccountUpdated extends Event { + const TreasuryAccountUpdated({required this.newAccount}); + + factory TreasuryAccountUpdated._decode(_i1.Input input) { + return TreasuryAccountUpdated(newAccount: const _i1.U8ArrayCodec(32).decode(input)); + } + + /// T::AccountId + final _i3.AccountId32 newAccount; + + @override + Map>> toJson() => { + 'TreasuryAccountUpdated': {'newAccount': newAccount.toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const 
_i3.AccountId32Codec().sizeHint(newAccount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(newAccount, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is TreasuryAccountUpdated && _i4.listsEqual(other.newAccount, newAccount); + + @override + int get hashCode => newAccount.hashCode; +} + +class TreasuryPortionUpdated extends Event { + const TreasuryPortionUpdated({required this.newPortion}); + + factory TreasuryPortionUpdated._decode(_i1.Input input) { + return TreasuryPortionUpdated(newPortion: _i1.U8Codec.codec.decode(input)); + } + + /// u8 + final int newPortion; + + @override + Map> toJson() => { + 'TreasuryPortionUpdated': {'newPortion': newPortion}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(newPortion); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U8Codec.codec.encodeTo(newPortion, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is TreasuryPortionUpdated && other.newPortion == newPortion; + + @override + int get hashCode => newPortion.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_utility/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_utility/pallet/call.dart new file mode 100644 index 00000000..3788829a --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_utility/pallet/call.dart @@ -0,0 +1,549 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i6; + +import '../../quantus_runtime/origin_caller.dart' as _i4; +import '../../quantus_runtime/runtime_call.dart' as _i3; +import '../../sp_weights/weight_v2/weight.dart' as _i5; + +/// Contains a variant per dispatchable 
extrinsic that this pallet has. +abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Call { + const $Call(); + + Batch batch({required List<_i3.RuntimeCall> calls}) { + return Batch(calls: calls); + } + + AsDerivative asDerivative({required int index, required _i3.RuntimeCall call}) { + return AsDerivative(index: index, call: call); + } + + BatchAll batchAll({required List<_i3.RuntimeCall> calls}) { + return BatchAll(calls: calls); + } + + DispatchAs dispatchAs({required _i4.OriginCaller asOrigin, required _i3.RuntimeCall call}) { + return DispatchAs(asOrigin: asOrigin, call: call); + } + + ForceBatch forceBatch({required List<_i3.RuntimeCall> calls}) { + return ForceBatch(calls: calls); + } + + WithWeight withWeight({required _i3.RuntimeCall call, required _i5.Weight weight}) { + return WithWeight(call: call, weight: weight); + } + + IfElse ifElse({required _i3.RuntimeCall main, required _i3.RuntimeCall fallback}) { + return IfElse(main: main, fallback: fallback); + } + + DispatchAsFallible dispatchAsFallible({required _i4.OriginCaller asOrigin, required _i3.RuntimeCall call}) { + return DispatchAsFallible(asOrigin: asOrigin, call: call); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Batch._decode(input); + case 1: + return AsDerivative._decode(input); + case 2: + return BatchAll._decode(input); + case 3: + return DispatchAs._decode(input); + case 4: + return ForceBatch._decode(input); + case 5: + return WithWeight._decode(input); 
+ case 6: + return IfElse._decode(input); + case 7: + return DispatchAsFallible._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case Batch: + (value as Batch).encodeTo(output); + break; + case AsDerivative: + (value as AsDerivative).encodeTo(output); + break; + case BatchAll: + (value as BatchAll).encodeTo(output); + break; + case DispatchAs: + (value as DispatchAs).encodeTo(output); + break; + case ForceBatch: + (value as ForceBatch).encodeTo(output); + break; + case WithWeight: + (value as WithWeight).encodeTo(output); + break; + case IfElse: + (value as IfElse).encodeTo(output); + break; + case DispatchAsFallible: + (value as DispatchAsFallible).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case Batch: + return (value as Batch)._sizeHint(); + case AsDerivative: + return (value as AsDerivative)._sizeHint(); + case BatchAll: + return (value as BatchAll)._sizeHint(); + case DispatchAs: + return (value as DispatchAs)._sizeHint(); + case ForceBatch: + return (value as ForceBatch)._sizeHint(); + case WithWeight: + return (value as WithWeight)._sizeHint(); + case IfElse: + return (value as IfElse)._sizeHint(); + case DispatchAsFallible: + return (value as DispatchAsFallible)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Send a batch of dispatch calls. +/// +/// May be called from any origin except `None`. +/// +/// - `calls`: The calls to be dispatched from the same origin. The number of call must not +/// exceed the constant: `batched_calls_limit` (available in constant metadata). +/// +/// If origin is root then the calls are dispatched without checking origin filter. 
(This +/// includes bypassing `frame_system::Config::BaseCallFilter`). +/// +/// ## Complexity +/// - O(C) where C is the number of calls to be batched. +/// +/// This will return `Ok` in all circumstances. To determine the success of the batch, an +/// event is deposited. If a call failed and the batch was interrupted, then the +/// `BatchInterrupted` event is deposited, along with the number of successful calls made +/// and the error of the failed call. If all were successful, then the `BatchCompleted` +/// event is deposited. +class Batch extends Call { + const Batch({required this.calls}); + + factory Batch._decode(_i1.Input input) { + return Batch(calls: const _i1.SequenceCodec<_i3.RuntimeCall>(_i3.RuntimeCall.codec).decode(input)); + } + + /// Vec<::RuntimeCall> + final List<_i3.RuntimeCall> calls; + + @override + Map>>>> toJson() => { + 'batch': {'calls': calls.map((value) => value.toJson()).toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.SequenceCodec<_i3.RuntimeCall>(_i3.RuntimeCall.codec).sizeHint(calls); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.SequenceCodec<_i3.RuntimeCall>(_i3.RuntimeCall.codec).encodeTo(calls, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Batch && _i6.listsEqual(other.calls, calls); + + @override + int get hashCode => calls.hashCode; +} + +/// Send a call through an indexed pseudonym of the sender. +/// +/// Filter from origin are passed along. The call will be dispatched with an origin which +/// use the same filter as the origin of this call. +/// +/// NOTE: If you need to ensure that any account-based filtering is not honored (i.e. +/// because you expect `proxy` to have been used prior in the call stack and you do not want +/// the call restrictions to apply to any sub-accounts), then use `as_multi_threshold_1` +/// in the Multisig pallet instead. 
+/// +/// NOTE: Prior to version *12, this was called `as_limited_sub`. +/// +/// The dispatch origin for this call must be _Signed_. +class AsDerivative extends Call { + const AsDerivative({required this.index, required this.call}); + + factory AsDerivative._decode(_i1.Input input) { + return AsDerivative(index: _i1.U16Codec.codec.decode(input), call: _i3.RuntimeCall.codec.decode(input)); + } + + /// u16 + final int index; + + /// Box<::RuntimeCall> + final _i3.RuntimeCall call; + + @override + Map> toJson() => { + 'as_derivative': {'index': index, 'call': call.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U16Codec.codec.sizeHint(index); + size = size + _i3.RuntimeCall.codec.sizeHint(call); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U16Codec.codec.encodeTo(index, output); + _i3.RuntimeCall.codec.encodeTo(call, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is AsDerivative && other.index == index && other.call == call; + + @override + int get hashCode => Object.hash(index, call); +} + +/// Send a batch of dispatch calls and atomically execute them. +/// The whole transaction will rollback and fail if any of the calls failed. +/// +/// May be called from any origin except `None`. +/// +/// - `calls`: The calls to be dispatched from the same origin. The number of call must not +/// exceed the constant: `batched_calls_limit` (available in constant metadata). +/// +/// If origin is root then the calls are dispatched without checking origin filter. (This +/// includes bypassing `frame_system::Config::BaseCallFilter`). +/// +/// ## Complexity +/// - O(C) where C is the number of calls to be batched. 
+class BatchAll extends Call { + const BatchAll({required this.calls}); + + factory BatchAll._decode(_i1.Input input) { + return BatchAll(calls: const _i1.SequenceCodec<_i3.RuntimeCall>(_i3.RuntimeCall.codec).decode(input)); + } + + /// Vec<::RuntimeCall> + final List<_i3.RuntimeCall> calls; + + @override + Map>> toJson() => { + 'batch_all': {'calls': calls.map((value) => value.toJson()).toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.SequenceCodec<_i3.RuntimeCall>(_i3.RuntimeCall.codec).sizeHint(calls); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + const _i1.SequenceCodec<_i3.RuntimeCall>(_i3.RuntimeCall.codec).encodeTo(calls, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is BatchAll && _i6.listsEqual(other.calls, calls); + + @override + int get hashCode => calls.hashCode; +} + +/// Dispatches a function call with a provided origin. +/// +/// The dispatch origin for this call must be _Root_. +/// +/// ## Complexity +/// - O(1). 
+class DispatchAs extends Call { + const DispatchAs({required this.asOrigin, required this.call}); + + factory DispatchAs._decode(_i1.Input input) { + return DispatchAs(asOrigin: _i4.OriginCaller.codec.decode(input), call: _i3.RuntimeCall.codec.decode(input)); + } + + /// Box + final _i4.OriginCaller asOrigin; + + /// Box<::RuntimeCall> + final _i3.RuntimeCall call; + + @override + Map>> toJson() => { + 'dispatch_as': {'asOrigin': asOrigin.toJson(), 'call': call.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i4.OriginCaller.codec.sizeHint(asOrigin); + size = size + _i3.RuntimeCall.codec.sizeHint(call); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i4.OriginCaller.codec.encodeTo(asOrigin, output); + _i3.RuntimeCall.codec.encodeTo(call, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is DispatchAs && other.asOrigin == asOrigin && other.call == call; + + @override + int get hashCode => Object.hash(asOrigin, call); +} + +/// Send a batch of dispatch calls. +/// Unlike `batch`, it allows errors and won't interrupt. +/// +/// May be called from any origin except `None`. +/// +/// - `calls`: The calls to be dispatched from the same origin. The number of call must not +/// exceed the constant: `batched_calls_limit` (available in constant metadata). +/// +/// If origin is root then the calls are dispatch without checking origin filter. (This +/// includes bypassing `frame_system::Config::BaseCallFilter`). +/// +/// ## Complexity +/// - O(C) where C is the number of calls to be batched. 
+class ForceBatch extends Call { + const ForceBatch({required this.calls}); + + factory ForceBatch._decode(_i1.Input input) { + return ForceBatch(calls: const _i1.SequenceCodec<_i3.RuntimeCall>(_i3.RuntimeCall.codec).decode(input)); + } + + /// Vec<::RuntimeCall> + final List<_i3.RuntimeCall> calls; + + @override + Map>> toJson() => { + 'force_batch': {'calls': calls.map((value) => value.toJson()).toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.SequenceCodec<_i3.RuntimeCall>(_i3.RuntimeCall.codec).sizeHint(calls); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.SequenceCodec<_i3.RuntimeCall>(_i3.RuntimeCall.codec).encodeTo(calls, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ForceBatch && _i6.listsEqual(other.calls, calls); + + @override + int get hashCode => calls.hashCode; +} + +/// Dispatch a function call with a specified weight. +/// +/// This function does not check the weight of the call, and instead allows the +/// Root origin to specify the weight of the call. +/// +/// The dispatch origin for this call must be _Root_. 
+class WithWeight extends Call { + const WithWeight({required this.call, required this.weight}); + + factory WithWeight._decode(_i1.Input input) { + return WithWeight(call: _i3.RuntimeCall.codec.decode(input), weight: _i5.Weight.codec.decode(input)); + } + + /// Box<::RuntimeCall> + final _i3.RuntimeCall call; + + /// Weight + final _i5.Weight weight; + + @override + Map>> toJson() => { + 'with_weight': {'call': call.toJson(), 'weight': weight.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.RuntimeCall.codec.sizeHint(call); + size = size + _i5.Weight.codec.sizeHint(weight); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i3.RuntimeCall.codec.encodeTo(call, output); + _i5.Weight.codec.encodeTo(weight, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is WithWeight && other.call == call && other.weight == weight; + + @override + int get hashCode => Object.hash(call, weight); +} + +/// Dispatch a fallback call in the event the main call fails to execute. +/// May be called from any origin except `None`. +/// +/// This function first attempts to dispatch the `main` call. +/// If the `main` call fails, the `fallback` is attemted. +/// if the fallback is successfully dispatched, the weights of both calls +/// are accumulated and an event containing the main call error is deposited. +/// +/// In the event of a fallback failure the whole call fails +/// with the weights returned. +/// +/// - `main`: The main call to be dispatched. This is the primary action to execute. +/// - `fallback`: The fallback call to be dispatched in case the `main` call fails. +/// +/// ## Dispatch Logic +/// - If the origin is `root`, both the main and fallback calls are executed without +/// applying any origin filters. +/// - If the origin is not `root`, the origin filter is applied to both the `main` and +/// `fallback` calls. 
+/// +/// ## Use Case +/// - Some use cases might involve submitting a `batch` type call in either main, fallback +/// or both. +class IfElse extends Call { + const IfElse({required this.main, required this.fallback}); + + factory IfElse._decode(_i1.Input input) { + return IfElse(main: _i3.RuntimeCall.codec.decode(input), fallback: _i3.RuntimeCall.codec.decode(input)); + } + + /// Box<::RuntimeCall> + final _i3.RuntimeCall main; + + /// Box<::RuntimeCall> + final _i3.RuntimeCall fallback; + + @override + Map>> toJson() => { + 'if_else': {'main': main.toJson(), 'fallback': fallback.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.RuntimeCall.codec.sizeHint(main); + size = size + _i3.RuntimeCall.codec.sizeHint(fallback); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i3.RuntimeCall.codec.encodeTo(main, output); + _i3.RuntimeCall.codec.encodeTo(fallback, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is IfElse && other.main == main && other.fallback == fallback; + + @override + int get hashCode => Object.hash(main, fallback); +} + +/// Dispatches a function call with a provided origin. +/// +/// Almost the same as [`Pallet::dispatch_as`] but forwards any error of the inner call. +/// +/// The dispatch origin for this call must be _Root_. 
+class DispatchAsFallible extends Call { + const DispatchAsFallible({required this.asOrigin, required this.call}); + + factory DispatchAsFallible._decode(_i1.Input input) { + return DispatchAsFallible( + asOrigin: _i4.OriginCaller.codec.decode(input), + call: _i3.RuntimeCall.codec.decode(input), + ); + } + + /// Box + final _i4.OriginCaller asOrigin; + + /// Box<::RuntimeCall> + final _i3.RuntimeCall call; + + @override + Map>> toJson() => { + 'dispatch_as_fallible': {'asOrigin': asOrigin.toJson(), 'call': call.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i4.OriginCaller.codec.sizeHint(asOrigin); + size = size + _i3.RuntimeCall.codec.sizeHint(call); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i4.OriginCaller.codec.encodeTo(asOrigin, output); + _i3.RuntimeCall.codec.encodeTo(call, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is DispatchAsFallible && other.asOrigin == asOrigin && other.call == call; + + @override + int get hashCode => Object.hash(asOrigin, call); +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_utility/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_utility/pallet/error.dart new file mode 100644 index 00000000..61dd7ad9 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_utility/pallet/error.dart @@ -0,0 +1,48 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + /// Too many calls batched. 
+ tooManyCalls('TooManyCalls', 0); + + const Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.tooManyCalls; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_utility/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_utility/pallet/event.dart new file mode 100644 index 00000000..4f8e5095 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_utility/pallet/event.dart @@ -0,0 +1,381 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../../sp_runtime/dispatch_error.dart' as _i3; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $Event { + const $Event(); + + BatchInterrupted batchInterrupted({required int index, required _i3.DispatchError error}) { + return BatchInterrupted(index: index, error: error); + } + + 
BatchCompleted batchCompleted() { + return BatchCompleted(); + } + + BatchCompletedWithErrors batchCompletedWithErrors() { + return BatchCompletedWithErrors(); + } + + ItemCompleted itemCompleted() { + return ItemCompleted(); + } + + ItemFailed itemFailed({required _i3.DispatchError error}) { + return ItemFailed(error: error); + } + + DispatchedAs dispatchedAs({required _i1.Result result}) { + return DispatchedAs(result: result); + } + + IfElseMainSuccess ifElseMainSuccess() { + return IfElseMainSuccess(); + } + + IfElseFallbackCalled ifElseFallbackCalled({required _i3.DispatchError mainError}) { + return IfElseFallbackCalled(mainError: mainError); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return BatchInterrupted._decode(input); + case 1: + return const BatchCompleted(); + case 2: + return const BatchCompletedWithErrors(); + case 3: + return const ItemCompleted(); + case 4: + return ItemFailed._decode(input); + case 5: + return DispatchedAs._decode(input); + case 6: + return const IfElseMainSuccess(); + case 7: + return IfElseFallbackCalled._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case BatchInterrupted: + (value as BatchInterrupted).encodeTo(output); + break; + case BatchCompleted: + (value as BatchCompleted).encodeTo(output); + break; + case BatchCompletedWithErrors: + (value as BatchCompletedWithErrors).encodeTo(output); + break; + case ItemCompleted: + (value as ItemCompleted).encodeTo(output); + break; + case ItemFailed: + (value as ItemFailed).encodeTo(output); + break; + case DispatchedAs: + (value as DispatchedAs).encodeTo(output); + break; + case IfElseMainSuccess: + (value as IfElseMainSuccess).encodeTo(output); + break; + case IfElseFallbackCalled: + 
(value as IfElseFallbackCalled).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case BatchInterrupted: + return (value as BatchInterrupted)._sizeHint(); + case BatchCompleted: + return 1; + case BatchCompletedWithErrors: + return 1; + case ItemCompleted: + return 1; + case ItemFailed: + return (value as ItemFailed)._sizeHint(); + case DispatchedAs: + return (value as DispatchedAs)._sizeHint(); + case IfElseMainSuccess: + return 1; + case IfElseFallbackCalled: + return (value as IfElseFallbackCalled)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Batch of dispatches did not complete fully. Index of first failing dispatch given, as +/// well as the error. +class BatchInterrupted extends Event { + const BatchInterrupted({required this.index, required this.error}); + + factory BatchInterrupted._decode(_i1.Input input) { + return BatchInterrupted(index: _i1.U32Codec.codec.decode(input), error: _i3.DispatchError.codec.decode(input)); + } + + /// u32 + final int index; + + /// DispatchError + final _i3.DispatchError error; + + @override + Map> toJson() => { + 'BatchInterrupted': {'index': index, 'error': error.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(index); + size = size + _i3.DispatchError.codec.sizeHint(error); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U32Codec.codec.encodeTo(index, output); + _i3.DispatchError.codec.encodeTo(error, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is BatchInterrupted && other.index == index && other.error == error; + + @override + int get hashCode => Object.hash(index, error); +} + +/// Batch of dispatches completed fully with no error. 
+class BatchCompleted extends Event { + const BatchCompleted(); + + @override + Map toJson() => {'BatchCompleted': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + } + + @override + bool operator ==(Object other) => other is BatchCompleted; + + @override + int get hashCode => runtimeType.hashCode; +} + +/// Batch of dispatches completed but has errors. +class BatchCompletedWithErrors extends Event { + const BatchCompletedWithErrors(); + + @override + Map toJson() => {'BatchCompletedWithErrors': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + } + + @override + bool operator ==(Object other) => other is BatchCompletedWithErrors; + + @override + int get hashCode => runtimeType.hashCode; +} + +/// A single item within a Batch of dispatches has completed with no error. +class ItemCompleted extends Event { + const ItemCompleted(); + + @override + Map toJson() => {'ItemCompleted': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + } + + @override + bool operator ==(Object other) => other is ItemCompleted; + + @override + int get hashCode => runtimeType.hashCode; +} + +/// A single item within a Batch of dispatches has completed with error. 
+class ItemFailed extends Event { + const ItemFailed({required this.error}); + + factory ItemFailed._decode(_i1.Input input) { + return ItemFailed(error: _i3.DispatchError.codec.decode(input)); + } + + /// DispatchError + final _i3.DispatchError error; + + @override + Map>> toJson() => { + 'ItemFailed': {'error': error.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.DispatchError.codec.sizeHint(error); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i3.DispatchError.codec.encodeTo(error, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ItemFailed && other.error == error; + + @override + int get hashCode => error.hashCode; +} + +/// A call was dispatched. +class DispatchedAs extends Event { + const DispatchedAs({required this.result}); + + factory DispatchedAs._decode(_i1.Input input) { + return DispatchedAs( + result: const _i1.ResultCodec( + _i1.NullCodec.codec, + _i3.DispatchError.codec, + ).decode(input), + ); + } + + /// DispatchResult + final _i1.Result result; + + @override + Map>> toJson() => { + 'DispatchedAs': {'result': result.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = + size + + const _i1.ResultCodec( + _i1.NullCodec.codec, + _i3.DispatchError.codec, + ).sizeHint(result); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + const _i1.ResultCodec( + _i1.NullCodec.codec, + _i3.DispatchError.codec, + ).encodeTo(result, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is DispatchedAs && other.result == result; + + @override + int get hashCode => result.hashCode; +} + +/// Main call was dispatched. 
+class IfElseMainSuccess extends Event { + const IfElseMainSuccess(); + + @override + Map toJson() => {'IfElseMainSuccess': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + } + + @override + bool operator ==(Object other) => other is IfElseMainSuccess; + + @override + int get hashCode => runtimeType.hashCode; +} + +/// The fallback call was dispatched. +class IfElseFallbackCalled extends Event { + const IfElseFallbackCalled({required this.mainError}); + + factory IfElseFallbackCalled._decode(_i1.Input input) { + return IfElseFallbackCalled(mainError: _i3.DispatchError.codec.decode(input)); + } + + /// DispatchError + final _i3.DispatchError mainError; + + @override + Map>> toJson() => { + 'IfElseFallbackCalled': {'mainError': mainError.toJson()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i3.DispatchError.codec.sizeHint(mainError); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i3.DispatchError.codec.encodeTo(mainError, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is IfElseFallbackCalled && other.mainError == mainError; + + @override + int get hashCode => mainError.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_wormhole/pallet/call.dart b/quantus_sdk/lib/generated/planck/types/pallet_wormhole/pallet/call.dart new file mode 100644 index 00000000..9f65eaff --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_wormhole/pallet/call.dart @@ -0,0 +1,109 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i3; + +/// Contains a variant per dispatchable extrinsic that this pallet has. 
+abstract class Call { + const Call(); + + factory Call.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $CallCodec codec = $CallCodec(); + + static const $Call values = $Call(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map>> toJson(); +} + +class $Call { + const $Call(); + + VerifyAggregatedProof verifyAggregatedProof({required List proofBytes}) { + return VerifyAggregatedProof(proofBytes: proofBytes); + } +} + +class $CallCodec with _i1.Codec { + const $CallCodec(); + + @override + Call decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 2: + return VerifyAggregatedProof._decode(input); + default: + throw Exception('Call: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Call value, _i1.Output output) { + switch (value.runtimeType) { + case VerifyAggregatedProof: + (value as VerifyAggregatedProof).encodeTo(output); + break; + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Call value) { + switch (value.runtimeType) { + case VerifyAggregatedProof: + return (value as VerifyAggregatedProof)._sizeHint(); + default: + throw Exception('Call: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +/// Verify an aggregated wormhole proof and process all transfers in the batch +class VerifyAggregatedProof extends Call { + const VerifyAggregatedProof({required this.proofBytes}); + + factory VerifyAggregatedProof._decode(_i1.Input input) { + return VerifyAggregatedProof(proofBytes: _i1.U8SequenceCodec.codec.decode(input)); + } + + /// Vec + final List proofBytes; + + @override + Map>> toJson() => { + 'verify_aggregated_proof': {'proofBytes': proofBytes}, + }; + + int _sizeHint() { + int size = 1; + size = size + 
_i1.U8SequenceCodec.codec.sizeHint(proofBytes); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U8SequenceCodec.codec.encodeTo(proofBytes, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is VerifyAggregatedProof && _i3.listsEqual(other.proofBytes, proofBytes); + + @override + int get hashCode => proofBytes.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_wormhole/pallet/error.dart b/quantus_sdk/lib/generated/planck/types/pallet_wormhole/pallet/error.dart new file mode 100644 index 00000000..c5bb0158 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_wormhole/pallet/error.dart @@ -0,0 +1,96 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +/// The `Error` enum of this pallet. +enum Error { + invalidProof('InvalidProof', 0), + proofDeserializationFailed('ProofDeserializationFailed', 1), + verificationFailed('VerificationFailed', 2), + invalidPublicInputs('InvalidPublicInputs', 3), + nullifierAlreadyUsed('NullifierAlreadyUsed', 4), + verifierNotAvailable('VerifierNotAvailable', 5), + invalidStorageRoot('InvalidStorageRoot', 6), + storageRootMismatch('StorageRootMismatch', 7), + blockNotFound('BlockNotFound', 8), + invalidBlockNumber('InvalidBlockNumber', 9), + aggregatedVerifierNotAvailable('AggregatedVerifierNotAvailable', 10), + aggregatedProofDeserializationFailed('AggregatedProofDeserializationFailed', 11), + aggregatedVerificationFailed('AggregatedVerificationFailed', 12), + invalidAggregatedPublicInputs('InvalidAggregatedPublicInputs', 13), + + /// The volume fee rate in the proof doesn't match the configured rate + invalidVolumeFeeRate('InvalidVolumeFeeRate', 14), + + /// Transfer amount is below the minimum required + transferAmountBelowMinimum('TransferAmountBelowMinimum', 15); + + const 
Error(this.variantName, this.codecIndex); + + factory Error.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ErrorCodec codec = $ErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ErrorCodec with _i1.Codec { + const $ErrorCodec(); + + @override + Error decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Error.invalidProof; + case 1: + return Error.proofDeserializationFailed; + case 2: + return Error.verificationFailed; + case 3: + return Error.invalidPublicInputs; + case 4: + return Error.nullifierAlreadyUsed; + case 5: + return Error.verifierNotAvailable; + case 6: + return Error.invalidStorageRoot; + case 7: + return Error.storageRootMismatch; + case 8: + return Error.blockNotFound; + case 9: + return Error.invalidBlockNumber; + case 10: + return Error.aggregatedVerifierNotAvailable; + case 11: + return Error.aggregatedProofDeserializationFailed; + case 12: + return Error.aggregatedVerificationFailed; + case 13: + return Error.invalidAggregatedPublicInputs; + case 14: + return Error.invalidVolumeFeeRate; + case 15: + return Error.transferAmountBelowMinimum; + default: + throw Exception('Error: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Error value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/pallet_wormhole/pallet/event.dart b/quantus_sdk/lib/generated/planck/types/pallet_wormhole/pallet/event.dart new file mode 100644 index 00000000..67c40449 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/pallet_wormhole/pallet/event.dart @@ -0,0 +1,289 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 
'package:quiver/collection.dart' as _i4; + +import '../../sp_core/crypto/account_id32.dart' as _i3; + +/// The `Event` enum of this pallet +abstract class Event { + const Event(); + + factory Event.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EventCodec codec = $EventCodec(); + + static const $Event values = $Event(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $Event { + const $Event(); + + NativeTransferred nativeTransferred({ + required _i3.AccountId32 from, + required _i3.AccountId32 to, + required BigInt amount, + required BigInt transferCount, + }) { + return NativeTransferred(from: from, to: to, amount: amount, transferCount: transferCount); + } + + AssetTransferred assetTransferred({ + required int assetId, + required _i3.AccountId32 from, + required _i3.AccountId32 to, + required BigInt amount, + required BigInt transferCount, + }) { + return AssetTransferred(assetId: assetId, from: from, to: to, amount: amount, transferCount: transferCount); + } + + ProofVerified proofVerified({required BigInt exitAmount, required List> nullifiers}) { + return ProofVerified(exitAmount: exitAmount, nullifiers: nullifiers); + } +} + +class $EventCodec with _i1.Codec { + const $EventCodec(); + + @override + Event decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return NativeTransferred._decode(input); + case 1: + return AssetTransferred._decode(input); + case 2: + return ProofVerified._decode(input); + default: + throw Exception('Event: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Event value, _i1.Output output) { + switch (value.runtimeType) { + case NativeTransferred: + (value as NativeTransferred).encodeTo(output); + break; + case AssetTransferred: + (value as 
AssetTransferred).encodeTo(output); + break; + case ProofVerified: + (value as ProofVerified).encodeTo(output); + break; + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Event value) { + switch (value.runtimeType) { + case NativeTransferred: + return (value as NativeTransferred)._sizeHint(); + case AssetTransferred: + return (value as AssetTransferred)._sizeHint(); + case ProofVerified: + return (value as ProofVerified)._sizeHint(); + default: + throw Exception('Event: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class NativeTransferred extends Event { + const NativeTransferred({required this.from, required this.to, required this.amount, required this.transferCount}); + + factory NativeTransferred._decode(_i1.Input input) { + return NativeTransferred( + from: const _i1.U8ArrayCodec(32).decode(input), + to: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + transferCount: _i1.U64Codec.codec.decode(input), + ); + } + + /// ::AccountId + final _i3.AccountId32 from; + + /// ::AccountId + final _i3.AccountId32 to; + + /// BalanceOf + final BigInt amount; + + /// T::TransferCount + final BigInt transferCount; + + @override + Map> toJson() => { + 'NativeTransferred': {'from': from.toList(), 'to': to.toList(), 'amount': amount, 'transferCount': transferCount}, + }; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(from); + size = size + const _i3.AccountId32Codec().sizeHint(to); + size = size + _i1.U128Codec.codec.sizeHint(amount); + size = size + _i1.U64Codec.codec.sizeHint(transferCount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(from, output); + const _i1.U8ArrayCodec(32).encodeTo(to, output); + _i1.U128Codec.codec.encodeTo(amount, output); + _i1.U64Codec.codec.encodeTo(transferCount, output); + 
} + + @override + bool operator ==(Object other) => + identical(this, other) || + other is NativeTransferred && + _i4.listsEqual(other.from, from) && + _i4.listsEqual(other.to, to) && + other.amount == amount && + other.transferCount == transferCount; + + @override + int get hashCode => Object.hash(from, to, amount, transferCount); +} + +class AssetTransferred extends Event { + const AssetTransferred({ + required this.assetId, + required this.from, + required this.to, + required this.amount, + required this.transferCount, + }); + + factory AssetTransferred._decode(_i1.Input input) { + return AssetTransferred( + assetId: _i1.U32Codec.codec.decode(input), + from: const _i1.U8ArrayCodec(32).decode(input), + to: const _i1.U8ArrayCodec(32).decode(input), + amount: _i1.U128Codec.codec.decode(input), + transferCount: _i1.U64Codec.codec.decode(input), + ); + } + + /// AssetIdOf + final int assetId; + + /// ::AccountId + final _i3.AccountId32 from; + + /// ::AccountId + final _i3.AccountId32 to; + + /// AssetBalanceOf + final BigInt amount; + + /// T::TransferCount + final BigInt transferCount; + + @override + Map> toJson() => { + 'AssetTransferred': { + 'assetId': assetId, + 'from': from.toList(), + 'to': to.toList(), + 'amount': amount, + 'transferCount': transferCount, + }, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(assetId); + size = size + const _i3.AccountId32Codec().sizeHint(from); + size = size + const _i3.AccountId32Codec().sizeHint(to); + size = size + _i1.U128Codec.codec.sizeHint(amount); + size = size + _i1.U64Codec.codec.sizeHint(transferCount); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U32Codec.codec.encodeTo(assetId, output); + const _i1.U8ArrayCodec(32).encodeTo(from, output); + const _i1.U8ArrayCodec(32).encodeTo(to, output); + _i1.U128Codec.codec.encodeTo(amount, output); + _i1.U64Codec.codec.encodeTo(transferCount, output); + } + + @override + bool 
operator ==(Object other) => + identical(this, other) || + other is AssetTransferred && + other.assetId == assetId && + _i4.listsEqual(other.from, from) && + _i4.listsEqual(other.to, to) && + other.amount == amount && + other.transferCount == transferCount; + + @override + int get hashCode => Object.hash(assetId, from, to, amount, transferCount); +} + +class ProofVerified extends Event { + const ProofVerified({required this.exitAmount, required this.nullifiers}); + + factory ProofVerified._decode(_i1.Input input) { + return ProofVerified( + exitAmount: _i1.U128Codec.codec.decode(input), + nullifiers: const _i1.SequenceCodec>(_i1.U8ArrayCodec(32)).decode(input), + ); + } + + /// BalanceOf + final BigInt exitAmount; + + /// Vec<[u8; 32]> + final List> nullifiers; + + @override + Map> toJson() => { + 'ProofVerified': {'exitAmount': exitAmount, 'nullifiers': nullifiers.map((value) => value.toList()).toList()}, + }; + + int _sizeHint() { + int size = 1; + size = size + _i1.U128Codec.codec.sizeHint(exitAmount); + size = size + const _i1.SequenceCodec>(_i1.U8ArrayCodec(32)).sizeHint(nullifiers); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U128Codec.codec.encodeTo(exitAmount, output); + const _i1.SequenceCodec>(_i1.U8ArrayCodec(32)).encodeTo(nullifiers, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ProofVerified && other.exitAmount == exitAmount && _i4.listsEqual(other.nullifiers, nullifiers); + + @override + int get hashCode => Object.hash(exitAmount, nullifiers); +} diff --git a/quantus_sdk/lib/generated/planck/types/primitive_types/h256.dart b/quantus_sdk/lib/generated/planck/types/primitive_types/h256.dart new file mode 100644 index 00000000..3a26f8c3 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/primitive_types/h256.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 
'package:polkadart/scale_codec.dart' as _i1; + +typedef H256 = List; + +class H256Codec with _i1.Codec { + const H256Codec(); + + @override + H256 decode(_i1.Input input) { + return const _i1.U8ArrayCodec(32).decode(input); + } + + @override + void encodeTo(H256 value, _i1.Output output) { + const _i1.U8ArrayCodec(32).encodeTo(value, output); + } + + @override + int sizeHint(H256 value) { + return const _i1.U8ArrayCodec(32).sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/primitive_types/u512.dart b/quantus_sdk/lib/generated/planck/types/primitive_types/u512.dart new file mode 100644 index 00000000..92988381 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/primitive_types/u512.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef U512 = List; + +class U512Codec with _i1.Codec { + const U512Codec(); + + @override + U512 decode(_i1.Input input) { + return const _i1.U64ArrayCodec(8).decode(input); + } + + @override + void encodeTo(U512 value, _i1.Output output) { + const _i1.U64ArrayCodec(8).encodeTo(value, output); + } + + @override + int sizeHint(U512 value) { + return const _i1.U64ArrayCodec(8).sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/qp_dilithium_crypto/types/dilithium_signature_scheme.dart b/quantus_sdk/lib/generated/planck/types/qp_dilithium_crypto/types/dilithium_signature_scheme.dart new file mode 100644 index 00000000..4b323353 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/qp_dilithium_crypto/types/dilithium_signature_scheme.dart @@ -0,0 +1,105 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import 'dilithium_signature_with_public.dart' as _i3; + +abstract class DilithiumSignatureScheme { + const DilithiumSignatureScheme(); + + factory 
DilithiumSignatureScheme.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $DilithiumSignatureSchemeCodec codec = $DilithiumSignatureSchemeCodec(); + + static const $DilithiumSignatureScheme values = $DilithiumSignatureScheme(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map>> toJson(); +} + +class $DilithiumSignatureScheme { + const $DilithiumSignatureScheme(); + + Dilithium dilithium(_i3.DilithiumSignatureWithPublic value0) { + return Dilithium(value0); + } +} + +class $DilithiumSignatureSchemeCodec with _i1.Codec { + const $DilithiumSignatureSchemeCodec(); + + @override + DilithiumSignatureScheme decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Dilithium._decode(input); + default: + throw Exception('DilithiumSignatureScheme: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(DilithiumSignatureScheme value, _i1.Output output) { + switch (value.runtimeType) { + case Dilithium: + (value as Dilithium).encodeTo(output); + break; + default: + throw Exception('DilithiumSignatureScheme: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(DilithiumSignatureScheme value) { + switch (value.runtimeType) { + case Dilithium: + return (value as Dilithium)._sizeHint(); + default: + throw Exception('DilithiumSignatureScheme: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Dilithium extends DilithiumSignatureScheme { + const Dilithium(this.value0); + + factory Dilithium._decode(_i1.Input input) { + return Dilithium(_i3.DilithiumSignatureWithPublic.codec.decode(input)); + } + + /// DilithiumSignatureWithPublic + final _i3.DilithiumSignatureWithPublic value0; + + @override + Map>> toJson() => {'Dilithium': value0.toJson()}; + + int _sizeHint() 
{ + int size = 1; + size = size + _i3.DilithiumSignatureWithPublic.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.DilithiumSignatureWithPublic.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Dilithium && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/qp_dilithium_crypto/types/dilithium_signature_with_public.dart b/quantus_sdk/lib/generated/planck/types/qp_dilithium_crypto/types/dilithium_signature_with_public.dart new file mode 100644 index 00000000..b8b5fa11 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/qp_dilithium_crypto/types/dilithium_signature_with_public.dart @@ -0,0 +1,52 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i3; + +class DilithiumSignatureWithPublic { + const DilithiumSignatureWithPublic({required this.bytes}); + + factory DilithiumSignatureWithPublic.decode(_i1.Input input) { + return codec.decode(input); + } + + /// [u8; DilithiumSignatureWithPublic::TOTAL_LEN] + final List bytes; + + static const $DilithiumSignatureWithPublicCodec codec = $DilithiumSignatureWithPublicCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map> toJson() => {'bytes': bytes.toList()}; + + @override + bool operator ==(Object other) => + identical(this, other) || other is DilithiumSignatureWithPublic && _i3.listsEqual(other.bytes, bytes); + + @override + int get hashCode => bytes.hashCode; +} + +class $DilithiumSignatureWithPublicCodec with _i1.Codec { + const $DilithiumSignatureWithPublicCodec(); + + @override + void encodeTo(DilithiumSignatureWithPublic obj, _i1.Output output) { + const _i1.U8ArrayCodec(7219).encodeTo(obj.bytes, output); + } + + 
@override + DilithiumSignatureWithPublic decode(_i1.Input input) { + return DilithiumSignatureWithPublic(bytes: const _i1.U8ArrayCodec(7219).decode(input)); + } + + @override + int sizeHint(DilithiumSignatureWithPublic obj) { + int size = 0; + size = size + const _i1.U8ArrayCodec(7219).sizeHint(obj.bytes); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/qp_poseidon/poseidon_hasher.dart b/quantus_sdk/lib/generated/planck/types/qp_poseidon/poseidon_hasher.dart new file mode 100644 index 00000000..50302afc --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/qp_poseidon/poseidon_hasher.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef PoseidonHasher = dynamic; + +class PoseidonHasherCodec with _i1.Codec { + const PoseidonHasherCodec(); + + @override + PoseidonHasher decode(_i1.Input input) { + return _i1.NullCodec.codec.decode(input); + } + + @override + void encodeTo(PoseidonHasher value, _i1.Output output) { + _i1.NullCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(PoseidonHasher value) { + return _i1.NullCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/qp_scheduler/block_number_or_timestamp.dart b/quantus_sdk/lib/generated/planck/types/qp_scheduler/block_number_or_timestamp.dart new file mode 100644 index 00000000..72570de7 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/qp_scheduler/block_number_or_timestamp.dart @@ -0,0 +1,145 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +abstract class BlockNumberOrTimestamp { + const BlockNumberOrTimestamp(); + + factory BlockNumberOrTimestamp.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $BlockNumberOrTimestampCodec codec = $BlockNumberOrTimestampCodec(); + + static const 
$BlockNumberOrTimestamp values = $BlockNumberOrTimestamp(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $BlockNumberOrTimestamp { + const $BlockNumberOrTimestamp(); + + BlockNumber blockNumber(int value0) { + return BlockNumber(value0); + } + + Timestamp timestamp(BigInt value0) { + return Timestamp(value0); + } +} + +class $BlockNumberOrTimestampCodec with _i1.Codec { + const $BlockNumberOrTimestampCodec(); + + @override + BlockNumberOrTimestamp decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return BlockNumber._decode(input); + case 1: + return Timestamp._decode(input); + default: + throw Exception('BlockNumberOrTimestamp: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(BlockNumberOrTimestamp value, _i1.Output output) { + switch (value.runtimeType) { + case BlockNumber: + (value as BlockNumber).encodeTo(output); + break; + case Timestamp: + (value as Timestamp).encodeTo(output); + break; + default: + throw Exception('BlockNumberOrTimestamp: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(BlockNumberOrTimestamp value) { + switch (value.runtimeType) { + case BlockNumber: + return (value as BlockNumber)._sizeHint(); + case Timestamp: + return (value as Timestamp)._sizeHint(); + default: + throw Exception('BlockNumberOrTimestamp: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class BlockNumber extends BlockNumberOrTimestamp { + const BlockNumber(this.value0); + + factory BlockNumber._decode(_i1.Input input) { + return BlockNumber(_i1.U32Codec.codec.decode(input)); + } + + /// BlockNumber + final int value0; + + @override + Map toJson() => {'BlockNumber': value0}; + + int _sizeHint() { + int size = 1; + size = size + 
_i1.U32Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U32Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is BlockNumber && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Timestamp extends BlockNumberOrTimestamp { + const Timestamp(this.value0); + + factory Timestamp._decode(_i1.Input input) { + return Timestamp(_i1.U64Codec.codec.decode(input)); + } + + /// Moment + final BigInt value0; + + @override + Map toJson() => {'Timestamp': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U64Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U64Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Timestamp && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/qp_scheduler/dispatch_time.dart b/quantus_sdk/lib/generated/planck/types/qp_scheduler/dispatch_time.dart new file mode 100644 index 00000000..ba19d5d2 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/qp_scheduler/dispatch_time.dart @@ -0,0 +1,147 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import 'block_number_or_timestamp.dart' as _i3; + +abstract class DispatchTime { + const DispatchTime(); + + factory DispatchTime.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $DispatchTimeCodec codec = $DispatchTimeCodec(); + + static const $DispatchTime values = $DispatchTime(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } 
+ + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $DispatchTime { + const $DispatchTime(); + + At at(int value0) { + return At(value0); + } + + After after(_i3.BlockNumberOrTimestamp value0) { + return After(value0); + } +} + +class $DispatchTimeCodec with _i1.Codec { + const $DispatchTimeCodec(); + + @override + DispatchTime decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return At._decode(input); + case 1: + return After._decode(input); + default: + throw Exception('DispatchTime: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(DispatchTime value, _i1.Output output) { + switch (value.runtimeType) { + case At: + (value as At).encodeTo(output); + break; + case After: + (value as After).encodeTo(output); + break; + default: + throw Exception('DispatchTime: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(DispatchTime value) { + switch (value.runtimeType) { + case At: + return (value as At)._sizeHint(); + case After: + return (value as After)._sizeHint(); + default: + throw Exception('DispatchTime: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class At extends DispatchTime { + const At(this.value0); + + factory At._decode(_i1.Input input) { + return At(_i1.U32Codec.codec.decode(input)); + } + + /// BlockNumber + final int value0; + + @override + Map toJson() => {'At': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U32Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U32Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is At && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class After extends DispatchTime { + const After(this.value0); + + factory After._decode(_i1.Input 
input) { + return After(_i3.BlockNumberOrTimestamp.codec.decode(input)); + } + + /// BlockNumberOrTimestamp + final _i3.BlockNumberOrTimestamp value0; + + @override + Map> toJson() => {'After': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i3.BlockNumberOrTimestamp.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i3.BlockNumberOrTimestamp.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is After && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/quantus_runtime/governance/definitions/preimage_deposit.dart b/quantus_sdk/lib/generated/planck/types/quantus_runtime/governance/definitions/preimage_deposit.dart new file mode 100644 index 00000000..6bbcc7ef --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/quantus_runtime/governance/definitions/preimage_deposit.dart @@ -0,0 +1,50 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +class PreimageDeposit { + const PreimageDeposit({required this.amount}); + + factory PreimageDeposit.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Balance + final BigInt amount; + + static const $PreimageDepositCodec codec = $PreimageDepositCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'amount': amount}; + + @override + bool operator ==(Object other) => identical(this, other) || other is PreimageDeposit && other.amount == amount; + + @override + int get hashCode => amount.hashCode; +} + +class $PreimageDepositCodec with _i1.Codec { + const $PreimageDepositCodec(); + + @override + void encodeTo(PreimageDeposit obj, _i1.Output output) { + _i1.U128Codec.codec.encodeTo(obj.amount, output); + } + + @override + 
PreimageDeposit decode(_i1.Input input) { + return PreimageDeposit(amount: _i1.U128Codec.codec.decode(input)); + } + + @override + int sizeHint(PreimageDeposit obj) { + int size = 0; + size = size + _i1.U128Codec.codec.sizeHint(obj.amount); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/quantus_runtime/origin_caller.dart b/quantus_sdk/lib/generated/planck/types/quantus_runtime/origin_caller.dart new file mode 100644 index 00000000..44961c9d --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/quantus_runtime/origin_caller.dart @@ -0,0 +1,105 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../frame_support/dispatch/raw_origin.dart' as _i3; + +abstract class OriginCaller { + const OriginCaller(); + + factory OriginCaller.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $OriginCallerCodec codec = $OriginCallerCodec(); + + static const $OriginCaller values = $OriginCaller(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $OriginCaller { + const $OriginCaller(); + + System system(_i3.RawOrigin value0) { + return System(value0); + } +} + +class $OriginCallerCodec with _i1.Codec { + const $OriginCallerCodec(); + + @override + OriginCaller decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return System._decode(input); + default: + throw Exception('OriginCaller: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(OriginCaller value, _i1.Output output) { + switch (value.runtimeType) { + case System: + (value as System).encodeTo(output); + break; + default: + throw Exception('OriginCaller: Unsupported "$value" of type 
"${value.runtimeType}"'); + } + } + + @override + int sizeHint(OriginCaller value) { + switch (value.runtimeType) { + case System: + return (value as System)._sizeHint(); + default: + throw Exception('OriginCaller: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class System extends OriginCaller { + const System(this.value0); + + factory System._decode(_i1.Input input) { + return System(_i3.RawOrigin.codec.decode(input)); + } + + /// frame_system::Origin + final _i3.RawOrigin value0; + + @override + Map> toJson() => {'system': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i3.RawOrigin.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.RawOrigin.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is System && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime.dart b/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime.dart new file mode 100644 index 00000000..f16a6bb0 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef Runtime = dynamic; + +class RuntimeCodec with _i1.Codec { + const RuntimeCodec(); + + @override + Runtime decode(_i1.Input input) { + return _i1.NullCodec.codec.decode(input); + } + + @override + void encodeTo(Runtime value, _i1.Output output) { + _i1.NullCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(Runtime value) { + return _i1.NullCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_call.dart b/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_call.dart new file mode 100644 
index 00000000..ad465d4b --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_call.dart @@ -0,0 +1,810 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../frame_system/pallet/call.dart' as _i3; +import '../pallet_assets/pallet/call.dart' as _i17; +import '../pallet_balances/pallet/call.dart' as _i5; +import '../pallet_conviction_voting/pallet/call.dart' as _i12; +import '../pallet_multisig/pallet/call.dart' as _i18; +import '../pallet_preimage/pallet/call.dart' as _i7; +import '../pallet_ranked_collective/pallet/call.dart' as _i13; +import '../pallet_recovery/pallet/call.dart' as _i16; +import '../pallet_referenda/pallet/call_1.dart' as _i10; +import '../pallet_referenda/pallet/call_2.dart' as _i14; +import '../pallet_reversible_transfers/pallet/call.dart' as _i11; +import '../pallet_scheduler/pallet/call.dart' as _i8; +import '../pallet_sudo/pallet/call.dart' as _i6; +import '../pallet_timestamp/pallet/call.dart' as _i4; +import '../pallet_treasury/pallet/call.dart' as _i15; +import '../pallet_utility/pallet/call.dart' as _i9; +import '../pallet_wormhole/pallet/call.dart' as _i19; + +abstract class RuntimeCall { + const RuntimeCall(); + + factory RuntimeCall.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $RuntimeCallCodec codec = $RuntimeCallCodec(); + + static const $RuntimeCall values = $RuntimeCall(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $RuntimeCall { + const $RuntimeCall(); + + System system(_i3.Call value0) { + return System(value0); + } + + Timestamp timestamp(_i4.Call value0) { + return Timestamp(value0); + } + + Balances balances(_i5.Call value0) { + return Balances(value0); + } + + 
Sudo sudo(_i6.Call value0) { + return Sudo(value0); + } + + Preimage preimage(_i7.Call value0) { + return Preimage(value0); + } + + Scheduler scheduler(_i8.Call value0) { + return Scheduler(value0); + } + + Utility utility(_i9.Call value0) { + return Utility(value0); + } + + Referenda referenda(_i10.Call value0) { + return Referenda(value0); + } + + ReversibleTransfers reversibleTransfers(_i11.Call value0) { + return ReversibleTransfers(value0); + } + + ConvictionVoting convictionVoting(_i12.Call value0) { + return ConvictionVoting(value0); + } + + TechCollective techCollective(_i13.Call value0) { + return TechCollective(value0); + } + + TechReferenda techReferenda(_i14.Call value0) { + return TechReferenda(value0); + } + + TreasuryPallet treasuryPallet(_i15.Call value0) { + return TreasuryPallet(value0); + } + + Recovery recovery(_i16.Call value0) { + return Recovery(value0); + } + + Assets assets(_i17.Call value0) { + return Assets(value0); + } + + Multisig multisig(_i18.Call value0) { + return Multisig(value0); + } + + Wormhole wormhole(_i19.Call value0) { + return Wormhole(value0); + } +} + +class $RuntimeCallCodec with _i1.Codec { + const $RuntimeCallCodec(); + + @override + RuntimeCall decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return System._decode(input); + case 1: + return Timestamp._decode(input); + case 2: + return Balances._decode(input); + case 4: + return Sudo._decode(input); + case 7: + return Preimage._decode(input); + case 8: + return Scheduler._decode(input); + case 9: + return Utility._decode(input); + case 10: + return Referenda._decode(input); + case 11: + return ReversibleTransfers._decode(input); + case 12: + return ConvictionVoting._decode(input); + case 13: + return TechCollective._decode(input); + case 14: + return TechReferenda._decode(input); + case 15: + return TreasuryPallet._decode(input); + case 16: + return Recovery._decode(input); + case 17: + return 
Assets._decode(input); + case 19: + return Multisig._decode(input); + case 20: + return Wormhole._decode(input); + default: + throw Exception('RuntimeCall: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(RuntimeCall value, _i1.Output output) { + switch (value.runtimeType) { + case System: + (value as System).encodeTo(output); + break; + case Timestamp: + (value as Timestamp).encodeTo(output); + break; + case Balances: + (value as Balances).encodeTo(output); + break; + case Sudo: + (value as Sudo).encodeTo(output); + break; + case Preimage: + (value as Preimage).encodeTo(output); + break; + case Scheduler: + (value as Scheduler).encodeTo(output); + break; + case Utility: + (value as Utility).encodeTo(output); + break; + case Referenda: + (value as Referenda).encodeTo(output); + break; + case ReversibleTransfers: + (value as ReversibleTransfers).encodeTo(output); + break; + case ConvictionVoting: + (value as ConvictionVoting).encodeTo(output); + break; + case TechCollective: + (value as TechCollective).encodeTo(output); + break; + case TechReferenda: + (value as TechReferenda).encodeTo(output); + break; + case TreasuryPallet: + (value as TreasuryPallet).encodeTo(output); + break; + case Recovery: + (value as Recovery).encodeTo(output); + break; + case Assets: + (value as Assets).encodeTo(output); + break; + case Multisig: + (value as Multisig).encodeTo(output); + break; + case Wormhole: + (value as Wormhole).encodeTo(output); + break; + default: + throw Exception('RuntimeCall: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(RuntimeCall value) { + switch (value.runtimeType) { + case System: + return (value as System)._sizeHint(); + case Timestamp: + return (value as Timestamp)._sizeHint(); + case Balances: + return (value as Balances)._sizeHint(); + case Sudo: + return (value as Sudo)._sizeHint(); + case Preimage: + return (value as Preimage)._sizeHint(); + case Scheduler: + return (value as 
Scheduler)._sizeHint(); + case Utility: + return (value as Utility)._sizeHint(); + case Referenda: + return (value as Referenda)._sizeHint(); + case ReversibleTransfers: + return (value as ReversibleTransfers)._sizeHint(); + case ConvictionVoting: + return (value as ConvictionVoting)._sizeHint(); + case TechCollective: + return (value as TechCollective)._sizeHint(); + case TechReferenda: + return (value as TechReferenda)._sizeHint(); + case TreasuryPallet: + return (value as TreasuryPallet)._sizeHint(); + case Recovery: + return (value as Recovery)._sizeHint(); + case Assets: + return (value as Assets)._sizeHint(); + case Multisig: + return (value as Multisig)._sizeHint(); + case Wormhole: + return (value as Wormhole)._sizeHint(); + default: + throw Exception('RuntimeCall: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class System extends RuntimeCall { + const System(this.value0); + + factory System._decode(_i1.Input input) { + return System(_i3.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i3.Call value0; + + @override + Map>> toJson() => {'System': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i3.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is System && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Timestamp extends RuntimeCall { + const Timestamp(this.value0); + + factory Timestamp._decode(_i1.Input input) { + return Timestamp(_i4.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i4.Call value0; + + @override + Map>> toJson() => {'Timestamp': value0.toJson()}; + + int 
_sizeHint() { + int size = 1; + size = size + _i4.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i4.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Timestamp && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Balances extends RuntimeCall { + const Balances(this.value0); + + factory Balances._decode(_i1.Input input) { + return Balances(_i5.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i5.Call value0; + + @override + Map>> toJson() => {'Balances': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i5.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i5.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Balances && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Sudo extends RuntimeCall { + const Sudo(this.value0); + + factory Sudo._decode(_i1.Input input) { + return Sudo(_i6.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i6.Call value0; + + @override + Map> toJson() => {'Sudo': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i6.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i6.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Sudo && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Preimage extends RuntimeCall { + const 
Preimage(this.value0); + + factory Preimage._decode(_i1.Input input) { + return Preimage(_i7.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i7.Call value0; + + @override + Map>>> toJson() => {'Preimage': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i7.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i7.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Preimage && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Scheduler extends RuntimeCall { + const Scheduler(this.value0); + + factory Scheduler._decode(_i1.Input input) { + return Scheduler(_i8.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i8.Call value0; + + @override + Map>> toJson() => {'Scheduler': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i8.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + _i8.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Scheduler && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Utility extends RuntimeCall { + const Utility(this.value0); + + factory Utility._decode(_i1.Input input) { + return Utility(_i9.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i9.Call value0; + + @override + Map>> toJson() => {'Utility': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i9.Call.codec.sizeHint(value0); + return size; + } + + void 
encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + _i9.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Utility && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Referenda extends RuntimeCall { + const Referenda(this.value0); + + factory Referenda._decode(_i1.Input input) { + return Referenda(_i10.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i10.Call value0; + + @override + Map>> toJson() => {'Referenda': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i10.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(10, output); + _i10.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Referenda && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class ReversibleTransfers extends RuntimeCall { + const ReversibleTransfers(this.value0); + + factory ReversibleTransfers._decode(_i1.Input input) { + return ReversibleTransfers(_i11.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i11.Call value0; + + @override + Map>> toJson() => {'ReversibleTransfers': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i11.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(11, output); + _i11.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ReversibleTransfers && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class ConvictionVoting extends RuntimeCall { + const 
ConvictionVoting(this.value0); + + factory ConvictionVoting._decode(_i1.Input input) { + return ConvictionVoting(_i12.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i12.Call value0; + + @override + Map>> toJson() => {'ConvictionVoting': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i12.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(12, output); + _i12.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ConvictionVoting && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class TechCollective extends RuntimeCall { + const TechCollective(this.value0); + + factory TechCollective._decode(_i1.Input input) { + return TechCollective(_i13.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i13.Call value0; + + @override + Map>> toJson() => {'TechCollective': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i13.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(13, output); + _i13.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is TechCollective && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class TechReferenda extends RuntimeCall { + const TechReferenda(this.value0); + + factory TechReferenda._decode(_i1.Input input) { + return TechReferenda(_i14.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i14.Call value0; + + @override + Map>> toJson() => {'TechReferenda': value0.toJson()}; + + int 
_sizeHint() { + int size = 1; + size = size + _i14.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(14, output); + _i14.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is TechReferenda && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class TreasuryPallet extends RuntimeCall { + const TreasuryPallet(this.value0); + + factory TreasuryPallet._decode(_i1.Input input) { + return TreasuryPallet(_i15.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i15.Call value0; + + @override + Map>> toJson() => {'TreasuryPallet': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i15.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(15, output); + _i15.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is TreasuryPallet && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Recovery extends RuntimeCall { + const Recovery(this.value0); + + factory Recovery._decode(_i1.Input input) { + return Recovery(_i16.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i16.Call value0; + + @override + Map> toJson() => {'Recovery': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i16.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(16, output); + _i16.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Recovery && other.value0 == value0; + + @override + int get hashCode => 
value0.hashCode; +} + +class Assets extends RuntimeCall { + const Assets(this.value0); + + factory Assets._decode(_i1.Input input) { + return Assets(_i17.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i17.Call value0; + + @override + Map>> toJson() => {'Assets': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i17.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(17, output); + _i17.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Assets && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Multisig extends RuntimeCall { + const Multisig(this.value0); + + factory Multisig._decode(_i1.Input input) { + return Multisig(_i18.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i18.Call value0; + + @override + Map>> toJson() => {'Multisig': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i18.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(19, output); + _i18.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Multisig && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Wormhole extends RuntimeCall { + const Wormhole(this.value0); + + factory Wormhole._decode(_i1.Input input) { + return Wormhole(_i19.Call.codec.decode(input)); + } + + /// self::sp_api_hidden_includes_construct_runtime::hidden_include::dispatch + ///::CallableCallFor + final _i19.Call value0; + + @override + Map>>> toJson() => {'Wormhole': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + 
_i19.Call.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(20, output); + _i19.Call.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Wormhole && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_event.dart b/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_event.dart new file mode 100644 index 00000000..c0691e2c --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_event.dart @@ -0,0 +1,922 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../frame_system/pallet/event.dart' as _i3; +import '../pallet_assets/pallet/event.dart' as _i19; +import '../pallet_assets_holder/pallet/event.dart' as _i20; +import '../pallet_balances/pallet/event.dart' as _i4; +import '../pallet_conviction_voting/pallet/event.dart' as _i14; +import '../pallet_mining_rewards/pallet/event.dart' as _i8; +import '../pallet_multisig/pallet/event.dart' as _i21; +import '../pallet_preimage/pallet/event.dart' as _i9; +import '../pallet_qpow/pallet/event.dart' as _i7; +import '../pallet_ranked_collective/pallet/event.dart' as _i15; +import '../pallet_recovery/pallet/event.dart' as _i18; +import '../pallet_referenda/pallet/event_1.dart' as _i12; +import '../pallet_referenda/pallet/event_2.dart' as _i16; +import '../pallet_reversible_transfers/pallet/event.dart' as _i13; +import '../pallet_scheduler/pallet/event.dart' as _i10; +import '../pallet_sudo/pallet/event.dart' as _i6; +import '../pallet_transaction_payment/pallet/event.dart' as _i5; +import '../pallet_treasury/pallet/event.dart' as _i17; +import '../pallet_utility/pallet/event.dart' as _i11; +import '../pallet_wormhole/pallet/event.dart' as _i22; + 
+abstract class RuntimeEvent { + const RuntimeEvent(); + + factory RuntimeEvent.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $RuntimeEventCodec codec = $RuntimeEventCodec(); + + static const $RuntimeEvent values = $RuntimeEvent(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map> toJson(); +} + +class $RuntimeEvent { + const $RuntimeEvent(); + + System system(_i3.Event value0) { + return System(value0); + } + + Balances balances(_i4.Event value0) { + return Balances(value0); + } + + TransactionPayment transactionPayment(_i5.Event value0) { + return TransactionPayment(value0); + } + + Sudo sudo(_i6.Event value0) { + return Sudo(value0); + } + + QPoW qPoW(_i7.Event value0) { + return QPoW(value0); + } + + MiningRewards miningRewards(_i8.Event value0) { + return MiningRewards(value0); + } + + Preimage preimage(_i9.Event value0) { + return Preimage(value0); + } + + Scheduler scheduler(_i10.Event value0) { + return Scheduler(value0); + } + + Utility utility(_i11.Event value0) { + return Utility(value0); + } + + Referenda referenda(_i12.Event value0) { + return Referenda(value0); + } + + ReversibleTransfers reversibleTransfers(_i13.Event value0) { + return ReversibleTransfers(value0); + } + + ConvictionVoting convictionVoting(_i14.Event value0) { + return ConvictionVoting(value0); + } + + TechCollective techCollective(_i15.Event value0) { + return TechCollective(value0); + } + + TechReferenda techReferenda(_i16.Event value0) { + return TechReferenda(value0); + } + + TreasuryPallet treasuryPallet(_i17.Event value0) { + return TreasuryPallet(value0); + } + + Recovery recovery(_i18.Event value0) { + return Recovery(value0); + } + + Assets assets(_i19.Event value0) { + return Assets(value0); + } + + AssetsHolder assetsHolder(_i20.Event value0) { + return AssetsHolder(value0); + } + + 
Multisig multisig(_i21.Event value0) { + return Multisig(value0); + } + + Wormhole wormhole(_i22.Event value0) { + return Wormhole(value0); + } +} + +class $RuntimeEventCodec with _i1.Codec { + const $RuntimeEventCodec(); + + @override + RuntimeEvent decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return System._decode(input); + case 2: + return Balances._decode(input); + case 3: + return TransactionPayment._decode(input); + case 4: + return Sudo._decode(input); + case 5: + return QPoW._decode(input); + case 6: + return MiningRewards._decode(input); + case 7: + return Preimage._decode(input); + case 8: + return Scheduler._decode(input); + case 9: + return Utility._decode(input); + case 10: + return Referenda._decode(input); + case 11: + return ReversibleTransfers._decode(input); + case 12: + return ConvictionVoting._decode(input); + case 13: + return TechCollective._decode(input); + case 14: + return TechReferenda._decode(input); + case 15: + return TreasuryPallet._decode(input); + case 16: + return Recovery._decode(input); + case 17: + return Assets._decode(input); + case 18: + return AssetsHolder._decode(input); + case 19: + return Multisig._decode(input); + case 20: + return Wormhole._decode(input); + default: + throw Exception('RuntimeEvent: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(RuntimeEvent value, _i1.Output output) { + switch (value.runtimeType) { + case System: + (value as System).encodeTo(output); + break; + case Balances: + (value as Balances).encodeTo(output); + break; + case TransactionPayment: + (value as TransactionPayment).encodeTo(output); + break; + case Sudo: + (value as Sudo).encodeTo(output); + break; + case QPoW: + (value as QPoW).encodeTo(output); + break; + case MiningRewards: + (value as MiningRewards).encodeTo(output); + break; + case Preimage: + (value as Preimage).encodeTo(output); + break; + case Scheduler: + (value as Scheduler).encodeTo(output); 
+ break; + case Utility: + (value as Utility).encodeTo(output); + break; + case Referenda: + (value as Referenda).encodeTo(output); + break; + case ReversibleTransfers: + (value as ReversibleTransfers).encodeTo(output); + break; + case ConvictionVoting: + (value as ConvictionVoting).encodeTo(output); + break; + case TechCollective: + (value as TechCollective).encodeTo(output); + break; + case TechReferenda: + (value as TechReferenda).encodeTo(output); + break; + case TreasuryPallet: + (value as TreasuryPallet).encodeTo(output); + break; + case Recovery: + (value as Recovery).encodeTo(output); + break; + case Assets: + (value as Assets).encodeTo(output); + break; + case AssetsHolder: + (value as AssetsHolder).encodeTo(output); + break; + case Multisig: + (value as Multisig).encodeTo(output); + break; + case Wormhole: + (value as Wormhole).encodeTo(output); + break; + default: + throw Exception('RuntimeEvent: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(RuntimeEvent value) { + switch (value.runtimeType) { + case System: + return (value as System)._sizeHint(); + case Balances: + return (value as Balances)._sizeHint(); + case TransactionPayment: + return (value as TransactionPayment)._sizeHint(); + case Sudo: + return (value as Sudo)._sizeHint(); + case QPoW: + return (value as QPoW)._sizeHint(); + case MiningRewards: + return (value as MiningRewards)._sizeHint(); + case Preimage: + return (value as Preimage)._sizeHint(); + case Scheduler: + return (value as Scheduler)._sizeHint(); + case Utility: + return (value as Utility)._sizeHint(); + case Referenda: + return (value as Referenda)._sizeHint(); + case ReversibleTransfers: + return (value as ReversibleTransfers)._sizeHint(); + case ConvictionVoting: + return (value as ConvictionVoting)._sizeHint(); + case TechCollective: + return (value as TechCollective)._sizeHint(); + case TechReferenda: + return (value as TechReferenda)._sizeHint(); + case TreasuryPallet: + return 
(value as TreasuryPallet)._sizeHint(); + case Recovery: + return (value as Recovery)._sizeHint(); + case Assets: + return (value as Assets)._sizeHint(); + case AssetsHolder: + return (value as AssetsHolder)._sizeHint(); + case Multisig: + return (value as Multisig)._sizeHint(); + case Wormhole: + return (value as Wormhole)._sizeHint(); + default: + throw Exception('RuntimeEvent: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class System extends RuntimeEvent { + const System(this.value0); + + factory System._decode(_i1.Input input) { + return System(_i3.Event.codec.decode(input)); + } + + /// frame_system::Event + final _i3.Event value0; + + @override + Map> toJson() => {'System': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i3.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i3.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is System && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Balances extends RuntimeEvent { + const Balances(this.value0); + + factory Balances._decode(_i1.Input input) { + return Balances(_i4.Event.codec.decode(input)); + } + + /// pallet_balances::Event + final _i4.Event value0; + + @override + Map>> toJson() => {'Balances': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i4.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i4.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Balances && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class TransactionPayment extends RuntimeEvent { + const TransactionPayment(this.value0); + + factory TransactionPayment._decode(_i1.Input input) { + 
return TransactionPayment(_i5.Event.codec.decode(input)); + } + + /// pallet_transaction_payment::Event + final _i5.Event value0; + + @override + Map>> toJson() => {'TransactionPayment': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i5.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i5.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is TransactionPayment && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Sudo extends RuntimeEvent { + const Sudo(this.value0); + + factory Sudo._decode(_i1.Input input) { + return Sudo(_i6.Event.codec.decode(input)); + } + + /// pallet_sudo::Event + final _i6.Event value0; + + @override + Map> toJson() => {'Sudo': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i6.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i6.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Sudo && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class QPoW extends RuntimeEvent { + const QPoW(this.value0); + + factory QPoW._decode(_i1.Input input) { + return QPoW(_i7.Event.codec.decode(input)); + } + + /// pallet_qpow::Event + final _i7.Event value0; + + @override + Map>> toJson() => {'QPoW': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i7.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i7.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is QPoW && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class 
MiningRewards extends RuntimeEvent { + const MiningRewards(this.value0); + + factory MiningRewards._decode(_i1.Input input) { + return MiningRewards(_i8.Event.codec.decode(input)); + } + + /// pallet_mining_rewards::Event + final _i8.Event value0; + + @override + Map>> toJson() => {'MiningRewards': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i8.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i8.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is MiningRewards && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Preimage extends RuntimeEvent { + const Preimage(this.value0); + + factory Preimage._decode(_i1.Input input) { + return Preimage(_i9.Event.codec.decode(input)); + } + + /// pallet_preimage::Event + final _i9.Event value0; + + @override + Map>>> toJson() => {'Preimage': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i9.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i9.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Preimage && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Scheduler extends RuntimeEvent { + const Scheduler(this.value0); + + factory Scheduler._decode(_i1.Input input) { + return Scheduler(_i10.Event.codec.decode(input)); + } + + /// pallet_scheduler::Event + final _i10.Event value0; + + @override + Map>> toJson() => {'Scheduler': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i10.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + _i10.Event.codec.encodeTo(value0, output); + } + + @override 
+ bool operator ==(Object other) => identical(this, other) || other is Scheduler && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Utility extends RuntimeEvent { + const Utility(this.value0); + + factory Utility._decode(_i1.Input input) { + return Utility(_i11.Event.codec.decode(input)); + } + + /// pallet_utility::Event + final _i11.Event value0; + + @override + Map> toJson() => {'Utility': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i11.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + _i11.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Utility && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Referenda extends RuntimeEvent { + const Referenda(this.value0); + + factory Referenda._decode(_i1.Input input) { + return Referenda(_i12.Event.codec.decode(input)); + } + + /// pallet_referenda::Event + final _i12.Event value0; + + @override + Map>> toJson() => {'Referenda': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i12.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(10, output); + _i12.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Referenda && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class ReversibleTransfers extends RuntimeEvent { + const ReversibleTransfers(this.value0); + + factory ReversibleTransfers._decode(_i1.Input input) { + return ReversibleTransfers(_i13.Event.codec.decode(input)); + } + + /// pallet_reversible_transfers::Event + final _i13.Event value0; + + @override + Map>> toJson() => {'ReversibleTransfers': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = 
size + _i13.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(11, output); + _i13.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ReversibleTransfers && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class ConvictionVoting extends RuntimeEvent { + const ConvictionVoting(this.value0); + + factory ConvictionVoting._decode(_i1.Input input) { + return ConvictionVoting(_i14.Event.codec.decode(input)); + } + + /// pallet_conviction_voting::Event + final _i14.Event value0; + + @override + Map> toJson() => {'ConvictionVoting': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i14.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(12, output); + _i14.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ConvictionVoting && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class TechCollective extends RuntimeEvent { + const TechCollective(this.value0); + + factory TechCollective._decode(_i1.Input input) { + return TechCollective(_i15.Event.codec.decode(input)); + } + + /// pallet_ranked_collective::Event + final _i15.Event value0; + + @override + Map>> toJson() => {'TechCollective': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i15.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(13, output); + _i15.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is TechCollective && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class TechReferenda extends RuntimeEvent { + const TechReferenda(this.value0); + + factory 
TechReferenda._decode(_i1.Input input) { + return TechReferenda(_i16.Event.codec.decode(input)); + } + + /// pallet_referenda::Event + final _i16.Event value0; + + @override + Map>> toJson() => {'TechReferenda': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i16.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(14, output); + _i16.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is TechReferenda && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class TreasuryPallet extends RuntimeEvent { + const TreasuryPallet(this.value0); + + factory TreasuryPallet._decode(_i1.Input input) { + return TreasuryPallet(_i17.Event.codec.decode(input)); + } + + /// pallet_treasury::Event + final _i17.Event value0; + + @override + Map>> toJson() => {'TreasuryPallet': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i17.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(15, output); + _i17.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is TreasuryPallet && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Recovery extends RuntimeEvent { + const Recovery(this.value0); + + factory Recovery._decode(_i1.Input input) { + return Recovery(_i18.Event.codec.decode(input)); + } + + /// pallet_recovery::Event + final _i18.Event value0; + + @override + Map>> toJson() => {'Recovery': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i18.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(16, output); + _i18.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => 
identical(this, other) || other is Recovery && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Assets extends RuntimeEvent { + const Assets(this.value0); + + factory Assets._decode(_i1.Input input) { + return Assets(_i19.Event.codec.decode(input)); + } + + /// pallet_assets::Event + final _i19.Event value0; + + @override + Map>> toJson() => {'Assets': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i19.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(17, output); + _i19.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Assets && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class AssetsHolder extends RuntimeEvent { + const AssetsHolder(this.value0); + + factory AssetsHolder._decode(_i1.Input input) { + return AssetsHolder(_i20.Event.codec.decode(input)); + } + + /// pallet_assets_holder::Event + final _i20.Event value0; + + @override + Map>> toJson() => {'AssetsHolder': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i20.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(18, output); + _i20.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is AssetsHolder && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Multisig extends RuntimeEvent { + const Multisig(this.value0); + + factory Multisig._decode(_i1.Input input) { + return Multisig(_i21.Event.codec.decode(input)); + } + + /// pallet_multisig::Event + final _i21.Event value0; + + @override + Map>> toJson() => {'Multisig': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i21.Event.codec.sizeHint(value0); + return size; + } + + void 
encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(19, output); + _i21.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Multisig && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Wormhole extends RuntimeEvent { + const Wormhole(this.value0); + + factory Wormhole._decode(_i1.Input input) { + return Wormhole(_i22.Event.codec.decode(input)); + } + + /// pallet_wormhole::Event + final _i22.Event value0; + + @override + Map>> toJson() => {'Wormhole': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i22.Event.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(20, output); + _i22.Event.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Wormhole && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_freeze_reason.dart b/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_freeze_reason.dart new file mode 100644 index 00000000..5e4c8b0d --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_freeze_reason.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef RuntimeFreezeReason = dynamic; + +class RuntimeFreezeReasonCodec with _i1.Codec { + const RuntimeFreezeReasonCodec(); + + @override + RuntimeFreezeReason decode(_i1.Input input) { + return _i1.NullCodec.codec.decode(input); + } + + @override + void encodeTo(RuntimeFreezeReason value, _i1.Output output) { + _i1.NullCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(RuntimeFreezeReason value) { + return _i1.NullCodec.codec.sizeHint(value); + } +} diff --git 
a/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_hold_reason.dart b/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_hold_reason.dart new file mode 100644 index 00000000..b44c860a --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/quantus_runtime/runtime_hold_reason.dart @@ -0,0 +1,148 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../pallet_preimage/pallet/hold_reason.dart' as _i3; +import '../pallet_reversible_transfers/pallet/hold_reason.dart' as _i4; + +abstract class RuntimeHoldReason { + const RuntimeHoldReason(); + + factory RuntimeHoldReason.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $RuntimeHoldReasonCodec codec = $RuntimeHoldReasonCodec(); + + static const $RuntimeHoldReason values = $RuntimeHoldReason(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $RuntimeHoldReason { + const $RuntimeHoldReason(); + + Preimage preimage(_i3.HoldReason value0) { + return Preimage(value0); + } + + ReversibleTransfers reversibleTransfers(_i4.HoldReason value0) { + return ReversibleTransfers(value0); + } +} + +class $RuntimeHoldReasonCodec with _i1.Codec { + const $RuntimeHoldReasonCodec(); + + @override + RuntimeHoldReason decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 7: + return Preimage._decode(input); + case 11: + return ReversibleTransfers._decode(input); + default: + throw Exception('RuntimeHoldReason: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(RuntimeHoldReason value, _i1.Output output) { + switch (value.runtimeType) { + case Preimage: + (value as Preimage).encodeTo(output); + break; + case ReversibleTransfers: 
+ (value as ReversibleTransfers).encodeTo(output); + break; + default: + throw Exception('RuntimeHoldReason: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(RuntimeHoldReason value) { + switch (value.runtimeType) { + case Preimage: + return (value as Preimage)._sizeHint(); + case ReversibleTransfers: + return (value as ReversibleTransfers)._sizeHint(); + default: + throw Exception('RuntimeHoldReason: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Preimage extends RuntimeHoldReason { + const Preimage(this.value0); + + factory Preimage._decode(_i1.Input input) { + return Preimage(_i3.HoldReason.codec.decode(input)); + } + + /// pallet_preimage::HoldReason + final _i3.HoldReason value0; + + @override + Map toJson() => {'Preimage': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i3.HoldReason.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i3.HoldReason.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Preimage && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class ReversibleTransfers extends RuntimeHoldReason { + const ReversibleTransfers(this.value0); + + factory ReversibleTransfers._decode(_i1.Input input) { + return ReversibleTransfers(_i4.HoldReason.codec.decode(input)); + } + + /// pallet_reversible_transfers::HoldReason + final _i4.HoldReason value0; + + @override + Map toJson() => {'ReversibleTransfers': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i4.HoldReason.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(11, output); + _i4.HoldReason.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is ReversibleTransfers && other.value0 
== value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/quantus_runtime/transaction_extensions/reversible_transaction_extension.dart b/quantus_sdk/lib/generated/planck/types/quantus_runtime/transaction_extensions/reversible_transaction_extension.dart new file mode 100644 index 00000000..1f05a1bb --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/quantus_runtime/transaction_extensions/reversible_transaction_extension.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef ReversibleTransactionExtension = dynamic; + +class ReversibleTransactionExtensionCodec with _i1.Codec { + const ReversibleTransactionExtensionCodec(); + + @override + ReversibleTransactionExtension decode(_i1.Input input) { + return _i1.NullCodec.codec.decode(input); + } + + @override + void encodeTo(ReversibleTransactionExtension value, _i1.Output output) { + _i1.NullCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(ReversibleTransactionExtension value) { + return _i1.NullCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/quantus_runtime/transaction_extensions/wormhole_proof_recorder_extension.dart b/quantus_sdk/lib/generated/planck/types/quantus_runtime/transaction_extensions/wormhole_proof_recorder_extension.dart new file mode 100644 index 00000000..285138a4 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/quantus_runtime/transaction_extensions/wormhole_proof_recorder_extension.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef WormholeProofRecorderExtension = dynamic; + +class WormholeProofRecorderExtensionCodec with _i1.Codec { + const WormholeProofRecorderExtensionCodec(); + + @override + WormholeProofRecorderExtension decode(_i1.Input input) { + return 
_i1.NullCodec.codec.decode(input); + } + + @override + void encodeTo(WormholeProofRecorderExtension value, _i1.Output output) { + _i1.NullCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(WormholeProofRecorderExtension value) { + return _i1.NullCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_arithmetic/arithmetic_error.dart b/quantus_sdk/lib/generated/planck/types/sp_arithmetic/arithmetic_error.dart new file mode 100644 index 00000000..fce65c07 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_arithmetic/arithmetic_error.dart @@ -0,0 +1,52 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum ArithmeticError { + underflow('Underflow', 0), + overflow('Overflow', 1), + divisionByZero('DivisionByZero', 2); + + const ArithmeticError(this.variantName, this.codecIndex); + + factory ArithmeticError.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $ArithmeticErrorCodec codec = $ArithmeticErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $ArithmeticErrorCodec with _i1.Codec { + const $ArithmeticErrorCodec(); + + @override + ArithmeticError decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return ArithmeticError.underflow; + case 1: + return ArithmeticError.overflow; + case 2: + return ArithmeticError.divisionByZero; + default: + throw Exception('ArithmeticError: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(ArithmeticError value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_arithmetic/fixed_point/fixed_i64.dart 
b/quantus_sdk/lib/generated/planck/types/sp_arithmetic/fixed_point/fixed_i64.dart new file mode 100644 index 00000000..e542ca16 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_arithmetic/fixed_point/fixed_i64.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef FixedI64 = BigInt; + +class FixedI64Codec with _i1.Codec { + const FixedI64Codec(); + + @override + FixedI64 decode(_i1.Input input) { + return _i1.I64Codec.codec.decode(input); + } + + @override + void encodeTo(FixedI64 value, _i1.Output output) { + _i1.I64Codec.codec.encodeTo(value, output); + } + + @override + int sizeHint(FixedI64 value) { + return _i1.I64Codec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_arithmetic/fixed_point/fixed_u128.dart b/quantus_sdk/lib/generated/planck/types/sp_arithmetic/fixed_point/fixed_u128.dart new file mode 100644 index 00000000..87a788a0 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_arithmetic/fixed_point/fixed_u128.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef FixedU128 = BigInt; + +class FixedU128Codec with _i1.Codec { + const FixedU128Codec(); + + @override + FixedU128 decode(_i1.Input input) { + return _i1.U128Codec.codec.decode(input); + } + + @override + void encodeTo(FixedU128 value, _i1.Output output) { + _i1.U128Codec.codec.encodeTo(value, output); + } + + @override + int sizeHint(FixedU128 value) { + return _i1.U128Codec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_arithmetic/per_things/perbill.dart b/quantus_sdk/lib/generated/planck/types/sp_arithmetic/per_things/perbill.dart new file mode 100644 index 00000000..0f88314b --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_arithmetic/per_things/perbill.dart @@ -0,0 +1,23 @@ +// ignore_for_file: 
no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef Perbill = int; + +class PerbillCodec with _i1.Codec { + const PerbillCodec(); + + @override + Perbill decode(_i1.Input input) { + return _i1.U32Codec.codec.decode(input); + } + + @override + void encodeTo(Perbill value, _i1.Output output) { + _i1.U32Codec.codec.encodeTo(value, output); + } + + @override + int sizeHint(Perbill value) { + return _i1.U32Codec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_arithmetic/per_things/permill.dart b/quantus_sdk/lib/generated/planck/types/sp_arithmetic/per_things/permill.dart new file mode 100644 index 00000000..7411f90d --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_arithmetic/per_things/permill.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef Permill = int; + +class PermillCodec with _i1.Codec { + const PermillCodec(); + + @override + Permill decode(_i1.Input input) { + return _i1.U32Codec.codec.decode(input); + } + + @override + void encodeTo(Permill value, _i1.Output output) { + _i1.U32Codec.codec.encodeTo(value, output); + } + + @override + int sizeHint(Permill value) { + return _i1.U32Codec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_core/crypto/account_id32.dart b/quantus_sdk/lib/generated/planck/types/sp_core/crypto/account_id32.dart new file mode 100644 index 00000000..93fbda9f --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_core/crypto/account_id32.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef AccountId32 = List; + +class AccountId32Codec with _i1.Codec { + const AccountId32Codec(); + + @override + AccountId32 decode(_i1.Input input) { + return const _i1.U8ArrayCodec(32).decode(input); + } + + 
@override + void encodeTo(AccountId32 value, _i1.Output output) { + const _i1.U8ArrayCodec(32).encodeTo(value, output); + } + + @override + int sizeHint(AccountId32 value) { + return const _i1.U8ArrayCodec(32).sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_runtime/dispatch_error.dart b/quantus_sdk/lib/generated/planck/types/sp_runtime/dispatch_error.dart new file mode 100644 index 00000000..715b1735 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_runtime/dispatch_error.dart @@ -0,0 +1,557 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../sp_arithmetic/arithmetic_error.dart' as _i5; +import 'module_error.dart' as _i3; +import 'proving_trie/trie_error.dart' as _i7; +import 'token_error.dart' as _i4; +import 'transactional_error.dart' as _i6; + +abstract class DispatchError { + const DispatchError(); + + factory DispatchError.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $DispatchErrorCodec codec = $DispatchErrorCodec(); + + static const $DispatchError values = $DispatchError(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $DispatchError { + const $DispatchError(); + + Other other() { + return Other(); + } + + CannotLookup cannotLookup() { + return CannotLookup(); + } + + BadOrigin badOrigin() { + return BadOrigin(); + } + + Module module(_i3.ModuleError value0) { + return Module(value0); + } + + ConsumerRemaining consumerRemaining() { + return ConsumerRemaining(); + } + + NoProviders noProviders() { + return NoProviders(); + } + + TooManyConsumers tooManyConsumers() { + return TooManyConsumers(); + } + + Token token(_i4.TokenError value0) { + return Token(value0); + } + + Arithmetic 
arithmetic(_i5.ArithmeticError value0) { + return Arithmetic(value0); + } + + Transactional transactional(_i6.TransactionalError value0) { + return Transactional(value0); + } + + Exhausted exhausted() { + return Exhausted(); + } + + Corruption corruption() { + return Corruption(); + } + + Unavailable unavailable() { + return Unavailable(); + } + + RootNotAllowed rootNotAllowed() { + return RootNotAllowed(); + } + + Trie trie(_i7.TrieError value0) { + return Trie(value0); + } +} + +class $DispatchErrorCodec with _i1.Codec { + const $DispatchErrorCodec(); + + @override + DispatchError decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return const Other(); + case 1: + return const CannotLookup(); + case 2: + return const BadOrigin(); + case 3: + return Module._decode(input); + case 4: + return const ConsumerRemaining(); + case 5: + return const NoProviders(); + case 6: + return const TooManyConsumers(); + case 7: + return Token._decode(input); + case 8: + return Arithmetic._decode(input); + case 9: + return Transactional._decode(input); + case 10: + return const Exhausted(); + case 11: + return const Corruption(); + case 12: + return const Unavailable(); + case 13: + return const RootNotAllowed(); + case 14: + return Trie._decode(input); + default: + throw Exception('DispatchError: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(DispatchError value, _i1.Output output) { + switch (value.runtimeType) { + case Other: + (value as Other).encodeTo(output); + break; + case CannotLookup: + (value as CannotLookup).encodeTo(output); + break; + case BadOrigin: + (value as BadOrigin).encodeTo(output); + break; + case Module: + (value as Module).encodeTo(output); + break; + case ConsumerRemaining: + (value as ConsumerRemaining).encodeTo(output); + break; + case NoProviders: + (value as NoProviders).encodeTo(output); + break; + case TooManyConsumers: + (value as TooManyConsumers).encodeTo(output); + 
break; + case Token: + (value as Token).encodeTo(output); + break; + case Arithmetic: + (value as Arithmetic).encodeTo(output); + break; + case Transactional: + (value as Transactional).encodeTo(output); + break; + case Exhausted: + (value as Exhausted).encodeTo(output); + break; + case Corruption: + (value as Corruption).encodeTo(output); + break; + case Unavailable: + (value as Unavailable).encodeTo(output); + break; + case RootNotAllowed: + (value as RootNotAllowed).encodeTo(output); + break; + case Trie: + (value as Trie).encodeTo(output); + break; + default: + throw Exception('DispatchError: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(DispatchError value) { + switch (value.runtimeType) { + case Other: + return 1; + case CannotLookup: + return 1; + case BadOrigin: + return 1; + case Module: + return (value as Module)._sizeHint(); + case ConsumerRemaining: + return 1; + case NoProviders: + return 1; + case TooManyConsumers: + return 1; + case Token: + return (value as Token)._sizeHint(); + case Arithmetic: + return (value as Arithmetic)._sizeHint(); + case Transactional: + return (value as Transactional)._sizeHint(); + case Exhausted: + return 1; + case Corruption: + return 1; + case Unavailable: + return 1; + case RootNotAllowed: + return 1; + case Trie: + return (value as Trie)._sizeHint(); + default: + throw Exception('DispatchError: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Other extends DispatchError { + const Other(); + + @override + Map toJson() => {'Other': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + } + + @override + bool operator ==(Object other) => other is Other; + + @override + int get hashCode => runtimeType.hashCode; +} + +class CannotLookup extends DispatchError { + const CannotLookup(); + + @override + Map toJson() => {'CannotLookup': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + 
} + + @override + bool operator ==(Object other) => other is CannotLookup; + + @override + int get hashCode => runtimeType.hashCode; +} + +class BadOrigin extends DispatchError { + const BadOrigin(); + + @override + Map toJson() => {'BadOrigin': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + } + + @override + bool operator ==(Object other) => other is BadOrigin; + + @override + int get hashCode => runtimeType.hashCode; +} + +class Module extends DispatchError { + const Module(this.value0); + + factory Module._decode(_i1.Input input) { + return Module(_i3.ModuleError.codec.decode(input)); + } + + /// ModuleError + final _i3.ModuleError value0; + + @override + Map> toJson() => {'Module': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i3.ModuleError.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i3.ModuleError.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Module && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class ConsumerRemaining extends DispatchError { + const ConsumerRemaining(); + + @override + Map toJson() => {'ConsumerRemaining': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + } + + @override + bool operator ==(Object other) => other is ConsumerRemaining; + + @override + int get hashCode => runtimeType.hashCode; +} + +class NoProviders extends DispatchError { + const NoProviders(); + + @override + Map toJson() => {'NoProviders': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + } + + @override + bool operator ==(Object other) => other is NoProviders; + + @override + int get hashCode => runtimeType.hashCode; +} + +class TooManyConsumers extends DispatchError { + const TooManyConsumers(); + + @override + Map toJson() => 
{'TooManyConsumers': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + } + + @override + bool operator ==(Object other) => other is TooManyConsumers; + + @override + int get hashCode => runtimeType.hashCode; +} + +class Token extends DispatchError { + const Token(this.value0); + + factory Token._decode(_i1.Input input) { + return Token(_i4.TokenError.codec.decode(input)); + } + + /// TokenError + final _i4.TokenError value0; + + @override + Map toJson() => {'Token': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i4.TokenError.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i4.TokenError.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Token && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Arithmetic extends DispatchError { + const Arithmetic(this.value0); + + factory Arithmetic._decode(_i1.Input input) { + return Arithmetic(_i5.ArithmeticError.codec.decode(input)); + } + + /// ArithmeticError + final _i5.ArithmeticError value0; + + @override + Map toJson() => {'Arithmetic': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i5.ArithmeticError.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + _i5.ArithmeticError.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Arithmetic && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Transactional extends DispatchError { + const Transactional(this.value0); + + factory Transactional._decode(_i1.Input input) { + return Transactional(_i6.TransactionalError.codec.decode(input)); + } + + /// TransactionalError + final _i6.TransactionalError value0; + + @override + Map 
toJson() => {'Transactional': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i6.TransactionalError.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + _i6.TransactionalError.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Transactional && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Exhausted extends DispatchError { + const Exhausted(); + + @override + Map toJson() => {'Exhausted': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(10, output); + } + + @override + bool operator ==(Object other) => other is Exhausted; + + @override + int get hashCode => runtimeType.hashCode; +} + +class Corruption extends DispatchError { + const Corruption(); + + @override + Map toJson() => {'Corruption': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(11, output); + } + + @override + bool operator ==(Object other) => other is Corruption; + + @override + int get hashCode => runtimeType.hashCode; +} + +class Unavailable extends DispatchError { + const Unavailable(); + + @override + Map toJson() => {'Unavailable': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(12, output); + } + + @override + bool operator ==(Object other) => other is Unavailable; + + @override + int get hashCode => runtimeType.hashCode; +} + +class RootNotAllowed extends DispatchError { + const RootNotAllowed(); + + @override + Map toJson() => {'RootNotAllowed': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(13, output); + } + + @override + bool operator ==(Object other) => other is RootNotAllowed; + + @override + int get hashCode => runtimeType.hashCode; +} + +class Trie extends DispatchError { + const Trie(this.value0); + + factory Trie._decode(_i1.Input input) { + return 
Trie(_i7.TrieError.codec.decode(input)); + } + + /// TrieError + final _i7.TrieError value0; + + @override + Map toJson() => {'Trie': value0.toJson()}; + + int _sizeHint() { + int size = 1; + size = size + _i7.TrieError.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(14, output); + _i7.TrieError.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Trie && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_runtime/dispatch_error_with_post_info.dart b/quantus_sdk/lib/generated/planck/types/sp_runtime/dispatch_error_with_post_info.dart new file mode 100644 index 00000000..cc35e8e0 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_runtime/dispatch_error_with_post_info.dart @@ -0,0 +1,63 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i4; + +import 'package:polkadart/scale_codec.dart' as _i1; + +import '../frame_support/dispatch/post_dispatch_info.dart' as _i2; +import 'dispatch_error.dart' as _i3; + +class DispatchErrorWithPostInfo { + const DispatchErrorWithPostInfo({required this.postInfo, required this.error}); + + factory DispatchErrorWithPostInfo.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Info + final _i2.PostDispatchInfo postInfo; + + /// DispatchError + final _i3.DispatchError error; + + static const $DispatchErrorWithPostInfoCodec codec = $DispatchErrorWithPostInfoCodec(); + + _i4.Uint8List encode() { + return codec.encode(this); + } + + Map> toJson() => {'postInfo': postInfo.toJson(), 'error': error.toJson()}; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DispatchErrorWithPostInfo && other.postInfo == postInfo && other.error == error; + + @override + int get hashCode => Object.hash(postInfo, error); +} + +class 
$DispatchErrorWithPostInfoCodec with _i1.Codec { + const $DispatchErrorWithPostInfoCodec(); + + @override + void encodeTo(DispatchErrorWithPostInfo obj, _i1.Output output) { + _i2.PostDispatchInfo.codec.encodeTo(obj.postInfo, output); + _i3.DispatchError.codec.encodeTo(obj.error, output); + } + + @override + DispatchErrorWithPostInfo decode(_i1.Input input) { + return DispatchErrorWithPostInfo( + postInfo: _i2.PostDispatchInfo.codec.decode(input), + error: _i3.DispatchError.codec.decode(input), + ); + } + + @override + int sizeHint(DispatchErrorWithPostInfo obj) { + int size = 0; + size = size + _i2.PostDispatchInfo.codec.sizeHint(obj.postInfo); + size = size + _i3.DispatchError.codec.sizeHint(obj.error); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/digest/digest.dart b/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/digest/digest.dart new file mode 100644 index 00000000..1aa48457 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/digest/digest.dart @@ -0,0 +1,53 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i3; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import 'digest_item.dart' as _i2; + +class Digest { + const Digest({required this.logs}); + + factory Digest.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Vec + final List<_i2.DigestItem> logs; + + static const $DigestCodec codec = $DigestCodec(); + + _i3.Uint8List encode() { + return codec.encode(this); + } + + Map>> toJson() => {'logs': logs.map((value) => value.toJson()).toList()}; + + @override + bool operator ==(Object other) => identical(this, other) || other is Digest && _i4.listsEqual(other.logs, logs); + + @override + int get hashCode => logs.hashCode; +} + +class $DigestCodec with _i1.Codec { + const $DigestCodec(); + + @override + void encodeTo(Digest obj, _i1.Output output) { + const 
_i1.SequenceCodec<_i2.DigestItem>(_i2.DigestItem.codec).encodeTo(obj.logs, output); + } + + @override + Digest decode(_i1.Input input) { + return Digest(logs: const _i1.SequenceCodec<_i2.DigestItem>(_i2.DigestItem.codec).decode(input)); + } + + @override + int sizeHint(Digest obj) { + int size = 0; + size = size + const _i1.SequenceCodec<_i2.DigestItem>(_i2.DigestItem.codec).sizeHint(obj.logs); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/digest/digest_item.dart b/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/digest/digest_item.dart new file mode 100644 index 00000000..4ac2501d --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/digest/digest_item.dart @@ -0,0 +1,285 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i3; + +abstract class DigestItem { + const DigestItem(); + + factory DigestItem.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $DigestItemCodec codec = $DigestItemCodec(); + + static const $DigestItem values = $DigestItem(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $DigestItem { + const $DigestItem(); + + PreRuntime preRuntime(List value0, List value1) { + return PreRuntime(value0, value1); + } + + Consensus consensus(List value0, List value1) { + return Consensus(value0, value1); + } + + Seal seal(List value0, List value1) { + return Seal(value0, value1); + } + + Other other(List value0) { + return Other(value0); + } + + RuntimeEnvironmentUpdated runtimeEnvironmentUpdated() { + return RuntimeEnvironmentUpdated(); + } +} + +class $DigestItemCodec with _i1.Codec { + const $DigestItemCodec(); + + 
@override + DigestItem decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 6: + return PreRuntime._decode(input); + case 4: + return Consensus._decode(input); + case 5: + return Seal._decode(input); + case 0: + return Other._decode(input); + case 8: + return const RuntimeEnvironmentUpdated(); + default: + throw Exception('DigestItem: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(DigestItem value, _i1.Output output) { + switch (value.runtimeType) { + case PreRuntime: + (value as PreRuntime).encodeTo(output); + break; + case Consensus: + (value as Consensus).encodeTo(output); + break; + case Seal: + (value as Seal).encodeTo(output); + break; + case Other: + (value as Other).encodeTo(output); + break; + case RuntimeEnvironmentUpdated: + (value as RuntimeEnvironmentUpdated).encodeTo(output); + break; + default: + throw Exception('DigestItem: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(DigestItem value) { + switch (value.runtimeType) { + case PreRuntime: + return (value as PreRuntime)._sizeHint(); + case Consensus: + return (value as Consensus)._sizeHint(); + case Seal: + return (value as Seal)._sizeHint(); + case Other: + return (value as Other)._sizeHint(); + case RuntimeEnvironmentUpdated: + return 1; + default: + throw Exception('DigestItem: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class PreRuntime extends DigestItem { + const PreRuntime(this.value0, this.value1); + + factory PreRuntime._decode(_i1.Input input) { + return PreRuntime(const _i1.U8ArrayCodec(4).decode(input), _i1.U8SequenceCodec.codec.decode(input)); + } + + /// ConsensusEngineId + final List value0; + + /// Vec + final List value1; + + @override + Map>> toJson() => { + 'PreRuntime': [value0.toList(), value1], + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.U8ArrayCodec(4).sizeHint(value0); + size = size + 
_i1.U8SequenceCodec.codec.sizeHint(value1); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + const _i1.U8ArrayCodec(4).encodeTo(value0, output); + _i1.U8SequenceCodec.codec.encodeTo(value1, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is PreRuntime && _i3.listsEqual(other.value0, value0) && _i3.listsEqual(other.value1, value1); + + @override + int get hashCode => Object.hash(value0, value1); +} + +class Consensus extends DigestItem { + const Consensus(this.value0, this.value1); + + factory Consensus._decode(_i1.Input input) { + return Consensus(const _i1.U8ArrayCodec(4).decode(input), _i1.U8SequenceCodec.codec.decode(input)); + } + + /// ConsensusEngineId + final List value0; + + /// Vec + final List value1; + + @override + Map>> toJson() => { + 'Consensus': [value0.toList(), value1], + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.U8ArrayCodec(4).sizeHint(value0); + size = size + _i1.U8SequenceCodec.codec.sizeHint(value1); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.U8ArrayCodec(4).encodeTo(value0, output); + _i1.U8SequenceCodec.codec.encodeTo(value1, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Consensus && _i3.listsEqual(other.value0, value0) && _i3.listsEqual(other.value1, value1); + + @override + int get hashCode => Object.hash(value0, value1); +} + +class Seal extends DigestItem { + const Seal(this.value0, this.value1); + + factory Seal._decode(_i1.Input input) { + return Seal(const _i1.U8ArrayCodec(4).decode(input), _i1.U8SequenceCodec.codec.decode(input)); + } + + /// ConsensusEngineId + final List value0; + + /// Vec + final List value1; + + @override + Map>> toJson() => { + 'Seal': [value0.toList(), value1], + }; + + int _sizeHint() { + int size = 1; + size = size + const _i1.U8ArrayCodec(4).sizeHint(value0); 
+ size = size + _i1.U8SequenceCodec.codec.sizeHint(value1); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + const _i1.U8ArrayCodec(4).encodeTo(value0, output); + _i1.U8SequenceCodec.codec.encodeTo(value1, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Seal && _i3.listsEqual(other.value0, value0) && _i3.listsEqual(other.value1, value1); + + @override + int get hashCode => Object.hash(value0, value1); +} + +class Other extends DigestItem { + const Other(this.value0); + + factory Other._decode(_i1.Input input) { + return Other(_i1.U8SequenceCodec.codec.decode(input)); + } + + /// Vec + final List value0; + + @override + Map> toJson() => {'Other': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8SequenceCodec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + _i1.U8SequenceCodec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Other && _i3.listsEqual(other.value0, value0); + + @override + int get hashCode => value0.hashCode; +} + +class RuntimeEnvironmentUpdated extends DigestItem { + const RuntimeEnvironmentUpdated(); + + @override + Map toJson() => {'RuntimeEnvironmentUpdated': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + } + + @override + bool operator ==(Object other) => other is RuntimeEnvironmentUpdated; + + @override + int get hashCode => runtimeType.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/era/era.dart b/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/era/era.dart new file mode 100644 index 00000000..97172381 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/era/era.dart @@ -0,0 +1,10544 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 
'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +abstract class Era { + const Era(); + + factory Era.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $EraCodec codec = $EraCodec(); + + static const $Era values = $Era(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $Era { + const $Era(); + + Immortal immortal() { + return Immortal(); + } + + Mortal1 mortal1(int value0) { + return Mortal1(value0); + } + + Mortal2 mortal2(int value0) { + return Mortal2(value0); + } + + Mortal3 mortal3(int value0) { + return Mortal3(value0); + } + + Mortal4 mortal4(int value0) { + return Mortal4(value0); + } + + Mortal5 mortal5(int value0) { + return Mortal5(value0); + } + + Mortal6 mortal6(int value0) { + return Mortal6(value0); + } + + Mortal7 mortal7(int value0) { + return Mortal7(value0); + } + + Mortal8 mortal8(int value0) { + return Mortal8(value0); + } + + Mortal9 mortal9(int value0) { + return Mortal9(value0); + } + + Mortal10 mortal10(int value0) { + return Mortal10(value0); + } + + Mortal11 mortal11(int value0) { + return Mortal11(value0); + } + + Mortal12 mortal12(int value0) { + return Mortal12(value0); + } + + Mortal13 mortal13(int value0) { + return Mortal13(value0); + } + + Mortal14 mortal14(int value0) { + return Mortal14(value0); + } + + Mortal15 mortal15(int value0) { + return Mortal15(value0); + } + + Mortal16 mortal16(int value0) { + return Mortal16(value0); + } + + Mortal17 mortal17(int value0) { + return Mortal17(value0); + } + + Mortal18 mortal18(int value0) { + return Mortal18(value0); + } + + Mortal19 mortal19(int value0) { + return Mortal19(value0); + } + + Mortal20 mortal20(int value0) { + return Mortal20(value0); + } + + Mortal21 mortal21(int value0) { + return Mortal21(value0); + } + + Mortal22 mortal22(int 
value0) { + return Mortal22(value0); + } + + Mortal23 mortal23(int value0) { + return Mortal23(value0); + } + + Mortal24 mortal24(int value0) { + return Mortal24(value0); + } + + Mortal25 mortal25(int value0) { + return Mortal25(value0); + } + + Mortal26 mortal26(int value0) { + return Mortal26(value0); + } + + Mortal27 mortal27(int value0) { + return Mortal27(value0); + } + + Mortal28 mortal28(int value0) { + return Mortal28(value0); + } + + Mortal29 mortal29(int value0) { + return Mortal29(value0); + } + + Mortal30 mortal30(int value0) { + return Mortal30(value0); + } + + Mortal31 mortal31(int value0) { + return Mortal31(value0); + } + + Mortal32 mortal32(int value0) { + return Mortal32(value0); + } + + Mortal33 mortal33(int value0) { + return Mortal33(value0); + } + + Mortal34 mortal34(int value0) { + return Mortal34(value0); + } + + Mortal35 mortal35(int value0) { + return Mortal35(value0); + } + + Mortal36 mortal36(int value0) { + return Mortal36(value0); + } + + Mortal37 mortal37(int value0) { + return Mortal37(value0); + } + + Mortal38 mortal38(int value0) { + return Mortal38(value0); + } + + Mortal39 mortal39(int value0) { + return Mortal39(value0); + } + + Mortal40 mortal40(int value0) { + return Mortal40(value0); + } + + Mortal41 mortal41(int value0) { + return Mortal41(value0); + } + + Mortal42 mortal42(int value0) { + return Mortal42(value0); + } + + Mortal43 mortal43(int value0) { + return Mortal43(value0); + } + + Mortal44 mortal44(int value0) { + return Mortal44(value0); + } + + Mortal45 mortal45(int value0) { + return Mortal45(value0); + } + + Mortal46 mortal46(int value0) { + return Mortal46(value0); + } + + Mortal47 mortal47(int value0) { + return Mortal47(value0); + } + + Mortal48 mortal48(int value0) { + return Mortal48(value0); + } + + Mortal49 mortal49(int value0) { + return Mortal49(value0); + } + + Mortal50 mortal50(int value0) { + return Mortal50(value0); + } + + Mortal51 mortal51(int value0) { + return Mortal51(value0); + } + + Mortal52 
mortal52(int value0) { + return Mortal52(value0); + } + + Mortal53 mortal53(int value0) { + return Mortal53(value0); + } + + Mortal54 mortal54(int value0) { + return Mortal54(value0); + } + + Mortal55 mortal55(int value0) { + return Mortal55(value0); + } + + Mortal56 mortal56(int value0) { + return Mortal56(value0); + } + + Mortal57 mortal57(int value0) { + return Mortal57(value0); + } + + Mortal58 mortal58(int value0) { + return Mortal58(value0); + } + + Mortal59 mortal59(int value0) { + return Mortal59(value0); + } + + Mortal60 mortal60(int value0) { + return Mortal60(value0); + } + + Mortal61 mortal61(int value0) { + return Mortal61(value0); + } + + Mortal62 mortal62(int value0) { + return Mortal62(value0); + } + + Mortal63 mortal63(int value0) { + return Mortal63(value0); + } + + Mortal64 mortal64(int value0) { + return Mortal64(value0); + } + + Mortal65 mortal65(int value0) { + return Mortal65(value0); + } + + Mortal66 mortal66(int value0) { + return Mortal66(value0); + } + + Mortal67 mortal67(int value0) { + return Mortal67(value0); + } + + Mortal68 mortal68(int value0) { + return Mortal68(value0); + } + + Mortal69 mortal69(int value0) { + return Mortal69(value0); + } + + Mortal70 mortal70(int value0) { + return Mortal70(value0); + } + + Mortal71 mortal71(int value0) { + return Mortal71(value0); + } + + Mortal72 mortal72(int value0) { + return Mortal72(value0); + } + + Mortal73 mortal73(int value0) { + return Mortal73(value0); + } + + Mortal74 mortal74(int value0) { + return Mortal74(value0); + } + + Mortal75 mortal75(int value0) { + return Mortal75(value0); + } + + Mortal76 mortal76(int value0) { + return Mortal76(value0); + } + + Mortal77 mortal77(int value0) { + return Mortal77(value0); + } + + Mortal78 mortal78(int value0) { + return Mortal78(value0); + } + + Mortal79 mortal79(int value0) { + return Mortal79(value0); + } + + Mortal80 mortal80(int value0) { + return Mortal80(value0); + } + + Mortal81 mortal81(int value0) { + return Mortal81(value0); + } + 
+ Mortal82 mortal82(int value0) { + return Mortal82(value0); + } + + Mortal83 mortal83(int value0) { + return Mortal83(value0); + } + + Mortal84 mortal84(int value0) { + return Mortal84(value0); + } + + Mortal85 mortal85(int value0) { + return Mortal85(value0); + } + + Mortal86 mortal86(int value0) { + return Mortal86(value0); + } + + Mortal87 mortal87(int value0) { + return Mortal87(value0); + } + + Mortal88 mortal88(int value0) { + return Mortal88(value0); + } + + Mortal89 mortal89(int value0) { + return Mortal89(value0); + } + + Mortal90 mortal90(int value0) { + return Mortal90(value0); + } + + Mortal91 mortal91(int value0) { + return Mortal91(value0); + } + + Mortal92 mortal92(int value0) { + return Mortal92(value0); + } + + Mortal93 mortal93(int value0) { + return Mortal93(value0); + } + + Mortal94 mortal94(int value0) { + return Mortal94(value0); + } + + Mortal95 mortal95(int value0) { + return Mortal95(value0); + } + + Mortal96 mortal96(int value0) { + return Mortal96(value0); + } + + Mortal97 mortal97(int value0) { + return Mortal97(value0); + } + + Mortal98 mortal98(int value0) { + return Mortal98(value0); + } + + Mortal99 mortal99(int value0) { + return Mortal99(value0); + } + + Mortal100 mortal100(int value0) { + return Mortal100(value0); + } + + Mortal101 mortal101(int value0) { + return Mortal101(value0); + } + + Mortal102 mortal102(int value0) { + return Mortal102(value0); + } + + Mortal103 mortal103(int value0) { + return Mortal103(value0); + } + + Mortal104 mortal104(int value0) { + return Mortal104(value0); + } + + Mortal105 mortal105(int value0) { + return Mortal105(value0); + } + + Mortal106 mortal106(int value0) { + return Mortal106(value0); + } + + Mortal107 mortal107(int value0) { + return Mortal107(value0); + } + + Mortal108 mortal108(int value0) { + return Mortal108(value0); + } + + Mortal109 mortal109(int value0) { + return Mortal109(value0); + } + + Mortal110 mortal110(int value0) { + return Mortal110(value0); + } + + Mortal111 
mortal111(int value0) { + return Mortal111(value0); + } + + Mortal112 mortal112(int value0) { + return Mortal112(value0); + } + + Mortal113 mortal113(int value0) { + return Mortal113(value0); + } + + Mortal114 mortal114(int value0) { + return Mortal114(value0); + } + + Mortal115 mortal115(int value0) { + return Mortal115(value0); + } + + Mortal116 mortal116(int value0) { + return Mortal116(value0); + } + + Mortal117 mortal117(int value0) { + return Mortal117(value0); + } + + Mortal118 mortal118(int value0) { + return Mortal118(value0); + } + + Mortal119 mortal119(int value0) { + return Mortal119(value0); + } + + Mortal120 mortal120(int value0) { + return Mortal120(value0); + } + + Mortal121 mortal121(int value0) { + return Mortal121(value0); + } + + Mortal122 mortal122(int value0) { + return Mortal122(value0); + } + + Mortal123 mortal123(int value0) { + return Mortal123(value0); + } + + Mortal124 mortal124(int value0) { + return Mortal124(value0); + } + + Mortal125 mortal125(int value0) { + return Mortal125(value0); + } + + Mortal126 mortal126(int value0) { + return Mortal126(value0); + } + + Mortal127 mortal127(int value0) { + return Mortal127(value0); + } + + Mortal128 mortal128(int value0) { + return Mortal128(value0); + } + + Mortal129 mortal129(int value0) { + return Mortal129(value0); + } + + Mortal130 mortal130(int value0) { + return Mortal130(value0); + } + + Mortal131 mortal131(int value0) { + return Mortal131(value0); + } + + Mortal132 mortal132(int value0) { + return Mortal132(value0); + } + + Mortal133 mortal133(int value0) { + return Mortal133(value0); + } + + Mortal134 mortal134(int value0) { + return Mortal134(value0); + } + + Mortal135 mortal135(int value0) { + return Mortal135(value0); + } + + Mortal136 mortal136(int value0) { + return Mortal136(value0); + } + + Mortal137 mortal137(int value0) { + return Mortal137(value0); + } + + Mortal138 mortal138(int value0) { + return Mortal138(value0); + } + + Mortal139 mortal139(int value0) { + return 
Mortal139(value0); + } + + Mortal140 mortal140(int value0) { + return Mortal140(value0); + } + + Mortal141 mortal141(int value0) { + return Mortal141(value0); + } + + Mortal142 mortal142(int value0) { + return Mortal142(value0); + } + + Mortal143 mortal143(int value0) { + return Mortal143(value0); + } + + Mortal144 mortal144(int value0) { + return Mortal144(value0); + } + + Mortal145 mortal145(int value0) { + return Mortal145(value0); + } + + Mortal146 mortal146(int value0) { + return Mortal146(value0); + } + + Mortal147 mortal147(int value0) { + return Mortal147(value0); + } + + Mortal148 mortal148(int value0) { + return Mortal148(value0); + } + + Mortal149 mortal149(int value0) { + return Mortal149(value0); + } + + Mortal150 mortal150(int value0) { + return Mortal150(value0); + } + + Mortal151 mortal151(int value0) { + return Mortal151(value0); + } + + Mortal152 mortal152(int value0) { + return Mortal152(value0); + } + + Mortal153 mortal153(int value0) { + return Mortal153(value0); + } + + Mortal154 mortal154(int value0) { + return Mortal154(value0); + } + + Mortal155 mortal155(int value0) { + return Mortal155(value0); + } + + Mortal156 mortal156(int value0) { + return Mortal156(value0); + } + + Mortal157 mortal157(int value0) { + return Mortal157(value0); + } + + Mortal158 mortal158(int value0) { + return Mortal158(value0); + } + + Mortal159 mortal159(int value0) { + return Mortal159(value0); + } + + Mortal160 mortal160(int value0) { + return Mortal160(value0); + } + + Mortal161 mortal161(int value0) { + return Mortal161(value0); + } + + Mortal162 mortal162(int value0) { + return Mortal162(value0); + } + + Mortal163 mortal163(int value0) { + return Mortal163(value0); + } + + Mortal164 mortal164(int value0) { + return Mortal164(value0); + } + + Mortal165 mortal165(int value0) { + return Mortal165(value0); + } + + Mortal166 mortal166(int value0) { + return Mortal166(value0); + } + + Mortal167 mortal167(int value0) { + return Mortal167(value0); + } + + Mortal168 
mortal168(int value0) { + return Mortal168(value0); + } + + Mortal169 mortal169(int value0) { + return Mortal169(value0); + } + + Mortal170 mortal170(int value0) { + return Mortal170(value0); + } + + Mortal171 mortal171(int value0) { + return Mortal171(value0); + } + + Mortal172 mortal172(int value0) { + return Mortal172(value0); + } + + Mortal173 mortal173(int value0) { + return Mortal173(value0); + } + + Mortal174 mortal174(int value0) { + return Mortal174(value0); + } + + Mortal175 mortal175(int value0) { + return Mortal175(value0); + } + + Mortal176 mortal176(int value0) { + return Mortal176(value0); + } + + Mortal177 mortal177(int value0) { + return Mortal177(value0); + } + + Mortal178 mortal178(int value0) { + return Mortal178(value0); + } + + Mortal179 mortal179(int value0) { + return Mortal179(value0); + } + + Mortal180 mortal180(int value0) { + return Mortal180(value0); + } + + Mortal181 mortal181(int value0) { + return Mortal181(value0); + } + + Mortal182 mortal182(int value0) { + return Mortal182(value0); + } + + Mortal183 mortal183(int value0) { + return Mortal183(value0); + } + + Mortal184 mortal184(int value0) { + return Mortal184(value0); + } + + Mortal185 mortal185(int value0) { + return Mortal185(value0); + } + + Mortal186 mortal186(int value0) { + return Mortal186(value0); + } + + Mortal187 mortal187(int value0) { + return Mortal187(value0); + } + + Mortal188 mortal188(int value0) { + return Mortal188(value0); + } + + Mortal189 mortal189(int value0) { + return Mortal189(value0); + } + + Mortal190 mortal190(int value0) { + return Mortal190(value0); + } + + Mortal191 mortal191(int value0) { + return Mortal191(value0); + } + + Mortal192 mortal192(int value0) { + return Mortal192(value0); + } + + Mortal193 mortal193(int value0) { + return Mortal193(value0); + } + + Mortal194 mortal194(int value0) { + return Mortal194(value0); + } + + Mortal195 mortal195(int value0) { + return Mortal195(value0); + } + + Mortal196 mortal196(int value0) { + return 
Mortal196(value0); + } + + Mortal197 mortal197(int value0) { + return Mortal197(value0); + } + + Mortal198 mortal198(int value0) { + return Mortal198(value0); + } + + Mortal199 mortal199(int value0) { + return Mortal199(value0); + } + + Mortal200 mortal200(int value0) { + return Mortal200(value0); + } + + Mortal201 mortal201(int value0) { + return Mortal201(value0); + } + + Mortal202 mortal202(int value0) { + return Mortal202(value0); + } + + Mortal203 mortal203(int value0) { + return Mortal203(value0); + } + + Mortal204 mortal204(int value0) { + return Mortal204(value0); + } + + Mortal205 mortal205(int value0) { + return Mortal205(value0); + } + + Mortal206 mortal206(int value0) { + return Mortal206(value0); + } + + Mortal207 mortal207(int value0) { + return Mortal207(value0); + } + + Mortal208 mortal208(int value0) { + return Mortal208(value0); + } + + Mortal209 mortal209(int value0) { + return Mortal209(value0); + } + + Mortal210 mortal210(int value0) { + return Mortal210(value0); + } + + Mortal211 mortal211(int value0) { + return Mortal211(value0); + } + + Mortal212 mortal212(int value0) { + return Mortal212(value0); + } + + Mortal213 mortal213(int value0) { + return Mortal213(value0); + } + + Mortal214 mortal214(int value0) { + return Mortal214(value0); + } + + Mortal215 mortal215(int value0) { + return Mortal215(value0); + } + + Mortal216 mortal216(int value0) { + return Mortal216(value0); + } + + Mortal217 mortal217(int value0) { + return Mortal217(value0); + } + + Mortal218 mortal218(int value0) { + return Mortal218(value0); + } + + Mortal219 mortal219(int value0) { + return Mortal219(value0); + } + + Mortal220 mortal220(int value0) { + return Mortal220(value0); + } + + Mortal221 mortal221(int value0) { + return Mortal221(value0); + } + + Mortal222 mortal222(int value0) { + return Mortal222(value0); + } + + Mortal223 mortal223(int value0) { + return Mortal223(value0); + } + + Mortal224 mortal224(int value0) { + return Mortal224(value0); + } + + Mortal225 
mortal225(int value0) { + return Mortal225(value0); + } + + Mortal226 mortal226(int value0) { + return Mortal226(value0); + } + + Mortal227 mortal227(int value0) { + return Mortal227(value0); + } + + Mortal228 mortal228(int value0) { + return Mortal228(value0); + } + + Mortal229 mortal229(int value0) { + return Mortal229(value0); + } + + Mortal230 mortal230(int value0) { + return Mortal230(value0); + } + + Mortal231 mortal231(int value0) { + return Mortal231(value0); + } + + Mortal232 mortal232(int value0) { + return Mortal232(value0); + } + + Mortal233 mortal233(int value0) { + return Mortal233(value0); + } + + Mortal234 mortal234(int value0) { + return Mortal234(value0); + } + + Mortal235 mortal235(int value0) { + return Mortal235(value0); + } + + Mortal236 mortal236(int value0) { + return Mortal236(value0); + } + + Mortal237 mortal237(int value0) { + return Mortal237(value0); + } + + Mortal238 mortal238(int value0) { + return Mortal238(value0); + } + + Mortal239 mortal239(int value0) { + return Mortal239(value0); + } + + Mortal240 mortal240(int value0) { + return Mortal240(value0); + } + + Mortal241 mortal241(int value0) { + return Mortal241(value0); + } + + Mortal242 mortal242(int value0) { + return Mortal242(value0); + } + + Mortal243 mortal243(int value0) { + return Mortal243(value0); + } + + Mortal244 mortal244(int value0) { + return Mortal244(value0); + } + + Mortal245 mortal245(int value0) { + return Mortal245(value0); + } + + Mortal246 mortal246(int value0) { + return Mortal246(value0); + } + + Mortal247 mortal247(int value0) { + return Mortal247(value0); + } + + Mortal248 mortal248(int value0) { + return Mortal248(value0); + } + + Mortal249 mortal249(int value0) { + return Mortal249(value0); + } + + Mortal250 mortal250(int value0) { + return Mortal250(value0); + } + + Mortal251 mortal251(int value0) { + return Mortal251(value0); + } + + Mortal252 mortal252(int value0) { + return Mortal252(value0); + } + + Mortal253 mortal253(int value0) { + return 
Mortal253(value0); + } + + Mortal254 mortal254(int value0) { + return Mortal254(value0); + } + + Mortal255 mortal255(int value0) { + return Mortal255(value0); + } +} + +class $EraCodec with _i1.Codec { + const $EraCodec(); + + @override + Era decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return const Immortal(); + case 1: + return Mortal1._decode(input); + case 2: + return Mortal2._decode(input); + case 3: + return Mortal3._decode(input); + case 4: + return Mortal4._decode(input); + case 5: + return Mortal5._decode(input); + case 6: + return Mortal6._decode(input); + case 7: + return Mortal7._decode(input); + case 8: + return Mortal8._decode(input); + case 9: + return Mortal9._decode(input); + case 10: + return Mortal10._decode(input); + case 11: + return Mortal11._decode(input); + case 12: + return Mortal12._decode(input); + case 13: + return Mortal13._decode(input); + case 14: + return Mortal14._decode(input); + case 15: + return Mortal15._decode(input); + case 16: + return Mortal16._decode(input); + case 17: + return Mortal17._decode(input); + case 18: + return Mortal18._decode(input); + case 19: + return Mortal19._decode(input); + case 20: + return Mortal20._decode(input); + case 21: + return Mortal21._decode(input); + case 22: + return Mortal22._decode(input); + case 23: + return Mortal23._decode(input); + case 24: + return Mortal24._decode(input); + case 25: + return Mortal25._decode(input); + case 26: + return Mortal26._decode(input); + case 27: + return Mortal27._decode(input); + case 28: + return Mortal28._decode(input); + case 29: + return Mortal29._decode(input); + case 30: + return Mortal30._decode(input); + case 31: + return Mortal31._decode(input); + case 32: + return Mortal32._decode(input); + case 33: + return Mortal33._decode(input); + case 34: + return Mortal34._decode(input); + case 35: + return Mortal35._decode(input); + case 36: + return Mortal36._decode(input); + case 37: + return 
Mortal37._decode(input); + case 38: + return Mortal38._decode(input); + case 39: + return Mortal39._decode(input); + case 40: + return Mortal40._decode(input); + case 41: + return Mortal41._decode(input); + case 42: + return Mortal42._decode(input); + case 43: + return Mortal43._decode(input); + case 44: + return Mortal44._decode(input); + case 45: + return Mortal45._decode(input); + case 46: + return Mortal46._decode(input); + case 47: + return Mortal47._decode(input); + case 48: + return Mortal48._decode(input); + case 49: + return Mortal49._decode(input); + case 50: + return Mortal50._decode(input); + case 51: + return Mortal51._decode(input); + case 52: + return Mortal52._decode(input); + case 53: + return Mortal53._decode(input); + case 54: + return Mortal54._decode(input); + case 55: + return Mortal55._decode(input); + case 56: + return Mortal56._decode(input); + case 57: + return Mortal57._decode(input); + case 58: + return Mortal58._decode(input); + case 59: + return Mortal59._decode(input); + case 60: + return Mortal60._decode(input); + case 61: + return Mortal61._decode(input); + case 62: + return Mortal62._decode(input); + case 63: + return Mortal63._decode(input); + case 64: + return Mortal64._decode(input); + case 65: + return Mortal65._decode(input); + case 66: + return Mortal66._decode(input); + case 67: + return Mortal67._decode(input); + case 68: + return Mortal68._decode(input); + case 69: + return Mortal69._decode(input); + case 70: + return Mortal70._decode(input); + case 71: + return Mortal71._decode(input); + case 72: + return Mortal72._decode(input); + case 73: + return Mortal73._decode(input); + case 74: + return Mortal74._decode(input); + case 75: + return Mortal75._decode(input); + case 76: + return Mortal76._decode(input); + case 77: + return Mortal77._decode(input); + case 78: + return Mortal78._decode(input); + case 79: + return Mortal79._decode(input); + case 80: + return Mortal80._decode(input); + case 81: + return 
Mortal81._decode(input); + case 82: + return Mortal82._decode(input); + case 83: + return Mortal83._decode(input); + case 84: + return Mortal84._decode(input); + case 85: + return Mortal85._decode(input); + case 86: + return Mortal86._decode(input); + case 87: + return Mortal87._decode(input); + case 88: + return Mortal88._decode(input); + case 89: + return Mortal89._decode(input); + case 90: + return Mortal90._decode(input); + case 91: + return Mortal91._decode(input); + case 92: + return Mortal92._decode(input); + case 93: + return Mortal93._decode(input); + case 94: + return Mortal94._decode(input); + case 95: + return Mortal95._decode(input); + case 96: + return Mortal96._decode(input); + case 97: + return Mortal97._decode(input); + case 98: + return Mortal98._decode(input); + case 99: + return Mortal99._decode(input); + case 100: + return Mortal100._decode(input); + case 101: + return Mortal101._decode(input); + case 102: + return Mortal102._decode(input); + case 103: + return Mortal103._decode(input); + case 104: + return Mortal104._decode(input); + case 105: + return Mortal105._decode(input); + case 106: + return Mortal106._decode(input); + case 107: + return Mortal107._decode(input); + case 108: + return Mortal108._decode(input); + case 109: + return Mortal109._decode(input); + case 110: + return Mortal110._decode(input); + case 111: + return Mortal111._decode(input); + case 112: + return Mortal112._decode(input); + case 113: + return Mortal113._decode(input); + case 114: + return Mortal114._decode(input); + case 115: + return Mortal115._decode(input); + case 116: + return Mortal116._decode(input); + case 117: + return Mortal117._decode(input); + case 118: + return Mortal118._decode(input); + case 119: + return Mortal119._decode(input); + case 120: + return Mortal120._decode(input); + case 121: + return Mortal121._decode(input); + case 122: + return Mortal122._decode(input); + case 123: + return Mortal123._decode(input); + case 124: + return 
Mortal124._decode(input); + case 125: + return Mortal125._decode(input); + case 126: + return Mortal126._decode(input); + case 127: + return Mortal127._decode(input); + case 128: + return Mortal128._decode(input); + case 129: + return Mortal129._decode(input); + case 130: + return Mortal130._decode(input); + case 131: + return Mortal131._decode(input); + case 132: + return Mortal132._decode(input); + case 133: + return Mortal133._decode(input); + case 134: + return Mortal134._decode(input); + case 135: + return Mortal135._decode(input); + case 136: + return Mortal136._decode(input); + case 137: + return Mortal137._decode(input); + case 138: + return Mortal138._decode(input); + case 139: + return Mortal139._decode(input); + case 140: + return Mortal140._decode(input); + case 141: + return Mortal141._decode(input); + case 142: + return Mortal142._decode(input); + case 143: + return Mortal143._decode(input); + case 144: + return Mortal144._decode(input); + case 145: + return Mortal145._decode(input); + case 146: + return Mortal146._decode(input); + case 147: + return Mortal147._decode(input); + case 148: + return Mortal148._decode(input); + case 149: + return Mortal149._decode(input); + case 150: + return Mortal150._decode(input); + case 151: + return Mortal151._decode(input); + case 152: + return Mortal152._decode(input); + case 153: + return Mortal153._decode(input); + case 154: + return Mortal154._decode(input); + case 155: + return Mortal155._decode(input); + case 156: + return Mortal156._decode(input); + case 157: + return Mortal157._decode(input); + case 158: + return Mortal158._decode(input); + case 159: + return Mortal159._decode(input); + case 160: + return Mortal160._decode(input); + case 161: + return Mortal161._decode(input); + case 162: + return Mortal162._decode(input); + case 163: + return Mortal163._decode(input); + case 164: + return Mortal164._decode(input); + case 165: + return Mortal165._decode(input); + case 166: + return Mortal166._decode(input); 
+ case 167: + return Mortal167._decode(input); + case 168: + return Mortal168._decode(input); + case 169: + return Mortal169._decode(input); + case 170: + return Mortal170._decode(input); + case 171: + return Mortal171._decode(input); + case 172: + return Mortal172._decode(input); + case 173: + return Mortal173._decode(input); + case 174: + return Mortal174._decode(input); + case 175: + return Mortal175._decode(input); + case 176: + return Mortal176._decode(input); + case 177: + return Mortal177._decode(input); + case 178: + return Mortal178._decode(input); + case 179: + return Mortal179._decode(input); + case 180: + return Mortal180._decode(input); + case 181: + return Mortal181._decode(input); + case 182: + return Mortal182._decode(input); + case 183: + return Mortal183._decode(input); + case 184: + return Mortal184._decode(input); + case 185: + return Mortal185._decode(input); + case 186: + return Mortal186._decode(input); + case 187: + return Mortal187._decode(input); + case 188: + return Mortal188._decode(input); + case 189: + return Mortal189._decode(input); + case 190: + return Mortal190._decode(input); + case 191: + return Mortal191._decode(input); + case 192: + return Mortal192._decode(input); + case 193: + return Mortal193._decode(input); + case 194: + return Mortal194._decode(input); + case 195: + return Mortal195._decode(input); + case 196: + return Mortal196._decode(input); + case 197: + return Mortal197._decode(input); + case 198: + return Mortal198._decode(input); + case 199: + return Mortal199._decode(input); + case 200: + return Mortal200._decode(input); + case 201: + return Mortal201._decode(input); + case 202: + return Mortal202._decode(input); + case 203: + return Mortal203._decode(input); + case 204: + return Mortal204._decode(input); + case 205: + return Mortal205._decode(input); + case 206: + return Mortal206._decode(input); + case 207: + return Mortal207._decode(input); + case 208: + return Mortal208._decode(input); + case 209: + return 
Mortal209._decode(input); + case 210: + return Mortal210._decode(input); + case 211: + return Mortal211._decode(input); + case 212: + return Mortal212._decode(input); + case 213: + return Mortal213._decode(input); + case 214: + return Mortal214._decode(input); + case 215: + return Mortal215._decode(input); + case 216: + return Mortal216._decode(input); + case 217: + return Mortal217._decode(input); + case 218: + return Mortal218._decode(input); + case 219: + return Mortal219._decode(input); + case 220: + return Mortal220._decode(input); + case 221: + return Mortal221._decode(input); + case 222: + return Mortal222._decode(input); + case 223: + return Mortal223._decode(input); + case 224: + return Mortal224._decode(input); + case 225: + return Mortal225._decode(input); + case 226: + return Mortal226._decode(input); + case 227: + return Mortal227._decode(input); + case 228: + return Mortal228._decode(input); + case 229: + return Mortal229._decode(input); + case 230: + return Mortal230._decode(input); + case 231: + return Mortal231._decode(input); + case 232: + return Mortal232._decode(input); + case 233: + return Mortal233._decode(input); + case 234: + return Mortal234._decode(input); + case 235: + return Mortal235._decode(input); + case 236: + return Mortal236._decode(input); + case 237: + return Mortal237._decode(input); + case 238: + return Mortal238._decode(input); + case 239: + return Mortal239._decode(input); + case 240: + return Mortal240._decode(input); + case 241: + return Mortal241._decode(input); + case 242: + return Mortal242._decode(input); + case 243: + return Mortal243._decode(input); + case 244: + return Mortal244._decode(input); + case 245: + return Mortal245._decode(input); + case 246: + return Mortal246._decode(input); + case 247: + return Mortal247._decode(input); + case 248: + return Mortal248._decode(input); + case 249: + return Mortal249._decode(input); + case 250: + return Mortal250._decode(input); + case 251: + return Mortal251._decode(input); 
+ case 252: + return Mortal252._decode(input); + case 253: + return Mortal253._decode(input); + case 254: + return Mortal254._decode(input); + case 255: + return Mortal255._decode(input); + default: + throw Exception('Era: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(Era value, _i1.Output output) { + switch (value.runtimeType) { + case Immortal: + (value as Immortal).encodeTo(output); + break; + case Mortal1: + (value as Mortal1).encodeTo(output); + break; + case Mortal2: + (value as Mortal2).encodeTo(output); + break; + case Mortal3: + (value as Mortal3).encodeTo(output); + break; + case Mortal4: + (value as Mortal4).encodeTo(output); + break; + case Mortal5: + (value as Mortal5).encodeTo(output); + break; + case Mortal6: + (value as Mortal6).encodeTo(output); + break; + case Mortal7: + (value as Mortal7).encodeTo(output); + break; + case Mortal8: + (value as Mortal8).encodeTo(output); + break; + case Mortal9: + (value as Mortal9).encodeTo(output); + break; + case Mortal10: + (value as Mortal10).encodeTo(output); + break; + case Mortal11: + (value as Mortal11).encodeTo(output); + break; + case Mortal12: + (value as Mortal12).encodeTo(output); + break; + case Mortal13: + (value as Mortal13).encodeTo(output); + break; + case Mortal14: + (value as Mortal14).encodeTo(output); + break; + case Mortal15: + (value as Mortal15).encodeTo(output); + break; + case Mortal16: + (value as Mortal16).encodeTo(output); + break; + case Mortal17: + (value as Mortal17).encodeTo(output); + break; + case Mortal18: + (value as Mortal18).encodeTo(output); + break; + case Mortal19: + (value as Mortal19).encodeTo(output); + break; + case Mortal20: + (value as Mortal20).encodeTo(output); + break; + case Mortal21: + (value as Mortal21).encodeTo(output); + break; + case Mortal22: + (value as Mortal22).encodeTo(output); + break; + case Mortal23: + (value as Mortal23).encodeTo(output); + break; + case Mortal24: + (value as Mortal24).encodeTo(output); + break; + case 
Mortal25: + (value as Mortal25).encodeTo(output); + break; + case Mortal26: + (value as Mortal26).encodeTo(output); + break; + case Mortal27: + (value as Mortal27).encodeTo(output); + break; + case Mortal28: + (value as Mortal28).encodeTo(output); + break; + case Mortal29: + (value as Mortal29).encodeTo(output); + break; + case Mortal30: + (value as Mortal30).encodeTo(output); + break; + case Mortal31: + (value as Mortal31).encodeTo(output); + break; + case Mortal32: + (value as Mortal32).encodeTo(output); + break; + case Mortal33: + (value as Mortal33).encodeTo(output); + break; + case Mortal34: + (value as Mortal34).encodeTo(output); + break; + case Mortal35: + (value as Mortal35).encodeTo(output); + break; + case Mortal36: + (value as Mortal36).encodeTo(output); + break; + case Mortal37: + (value as Mortal37).encodeTo(output); + break; + case Mortal38: + (value as Mortal38).encodeTo(output); + break; + case Mortal39: + (value as Mortal39).encodeTo(output); + break; + case Mortal40: + (value as Mortal40).encodeTo(output); + break; + case Mortal41: + (value as Mortal41).encodeTo(output); + break; + case Mortal42: + (value as Mortal42).encodeTo(output); + break; + case Mortal43: + (value as Mortal43).encodeTo(output); + break; + case Mortal44: + (value as Mortal44).encodeTo(output); + break; + case Mortal45: + (value as Mortal45).encodeTo(output); + break; + case Mortal46: + (value as Mortal46).encodeTo(output); + break; + case Mortal47: + (value as Mortal47).encodeTo(output); + break; + case Mortal48: + (value as Mortal48).encodeTo(output); + break; + case Mortal49: + (value as Mortal49).encodeTo(output); + break; + case Mortal50: + (value as Mortal50).encodeTo(output); + break; + case Mortal51: + (value as Mortal51).encodeTo(output); + break; + case Mortal52: + (value as Mortal52).encodeTo(output); + break; + case Mortal53: + (value as Mortal53).encodeTo(output); + break; + case Mortal54: + (value as Mortal54).encodeTo(output); + break; + case Mortal55: + (value 
as Mortal55).encodeTo(output); + break; + case Mortal56: + (value as Mortal56).encodeTo(output); + break; + case Mortal57: + (value as Mortal57).encodeTo(output); + break; + case Mortal58: + (value as Mortal58).encodeTo(output); + break; + case Mortal59: + (value as Mortal59).encodeTo(output); + break; + case Mortal60: + (value as Mortal60).encodeTo(output); + break; + case Mortal61: + (value as Mortal61).encodeTo(output); + break; + case Mortal62: + (value as Mortal62).encodeTo(output); + break; + case Mortal63: + (value as Mortal63).encodeTo(output); + break; + case Mortal64: + (value as Mortal64).encodeTo(output); + break; + case Mortal65: + (value as Mortal65).encodeTo(output); + break; + case Mortal66: + (value as Mortal66).encodeTo(output); + break; + case Mortal67: + (value as Mortal67).encodeTo(output); + break; + case Mortal68: + (value as Mortal68).encodeTo(output); + break; + case Mortal69: + (value as Mortal69).encodeTo(output); + break; + case Mortal70: + (value as Mortal70).encodeTo(output); + break; + case Mortal71: + (value as Mortal71).encodeTo(output); + break; + case Mortal72: + (value as Mortal72).encodeTo(output); + break; + case Mortal73: + (value as Mortal73).encodeTo(output); + break; + case Mortal74: + (value as Mortal74).encodeTo(output); + break; + case Mortal75: + (value as Mortal75).encodeTo(output); + break; + case Mortal76: + (value as Mortal76).encodeTo(output); + break; + case Mortal77: + (value as Mortal77).encodeTo(output); + break; + case Mortal78: + (value as Mortal78).encodeTo(output); + break; + case Mortal79: + (value as Mortal79).encodeTo(output); + break; + case Mortal80: + (value as Mortal80).encodeTo(output); + break; + case Mortal81: + (value as Mortal81).encodeTo(output); + break; + case Mortal82: + (value as Mortal82).encodeTo(output); + break; + case Mortal83: + (value as Mortal83).encodeTo(output); + break; + case Mortal84: + (value as Mortal84).encodeTo(output); + break; + case Mortal85: + (value as 
Mortal85).encodeTo(output); + break; + case Mortal86: + (value as Mortal86).encodeTo(output); + break; + case Mortal87: + (value as Mortal87).encodeTo(output); + break; + case Mortal88: + (value as Mortal88).encodeTo(output); + break; + case Mortal89: + (value as Mortal89).encodeTo(output); + break; + case Mortal90: + (value as Mortal90).encodeTo(output); + break; + case Mortal91: + (value as Mortal91).encodeTo(output); + break; + case Mortal92: + (value as Mortal92).encodeTo(output); + break; + case Mortal93: + (value as Mortal93).encodeTo(output); + break; + case Mortal94: + (value as Mortal94).encodeTo(output); + break; + case Mortal95: + (value as Mortal95).encodeTo(output); + break; + case Mortal96: + (value as Mortal96).encodeTo(output); + break; + case Mortal97: + (value as Mortal97).encodeTo(output); + break; + case Mortal98: + (value as Mortal98).encodeTo(output); + break; + case Mortal99: + (value as Mortal99).encodeTo(output); + break; + case Mortal100: + (value as Mortal100).encodeTo(output); + break; + case Mortal101: + (value as Mortal101).encodeTo(output); + break; + case Mortal102: + (value as Mortal102).encodeTo(output); + break; + case Mortal103: + (value as Mortal103).encodeTo(output); + break; + case Mortal104: + (value as Mortal104).encodeTo(output); + break; + case Mortal105: + (value as Mortal105).encodeTo(output); + break; + case Mortal106: + (value as Mortal106).encodeTo(output); + break; + case Mortal107: + (value as Mortal107).encodeTo(output); + break; + case Mortal108: + (value as Mortal108).encodeTo(output); + break; + case Mortal109: + (value as Mortal109).encodeTo(output); + break; + case Mortal110: + (value as Mortal110).encodeTo(output); + break; + case Mortal111: + (value as Mortal111).encodeTo(output); + break; + case Mortal112: + (value as Mortal112).encodeTo(output); + break; + case Mortal113: + (value as Mortal113).encodeTo(output); + break; + case Mortal114: + (value as Mortal114).encodeTo(output); + break; + case Mortal115: 
+ (value as Mortal115).encodeTo(output); + break; + case Mortal116: + (value as Mortal116).encodeTo(output); + break; + case Mortal117: + (value as Mortal117).encodeTo(output); + break; + case Mortal118: + (value as Mortal118).encodeTo(output); + break; + case Mortal119: + (value as Mortal119).encodeTo(output); + break; + case Mortal120: + (value as Mortal120).encodeTo(output); + break; + case Mortal121: + (value as Mortal121).encodeTo(output); + break; + case Mortal122: + (value as Mortal122).encodeTo(output); + break; + case Mortal123: + (value as Mortal123).encodeTo(output); + break; + case Mortal124: + (value as Mortal124).encodeTo(output); + break; + case Mortal125: + (value as Mortal125).encodeTo(output); + break; + case Mortal126: + (value as Mortal126).encodeTo(output); + break; + case Mortal127: + (value as Mortal127).encodeTo(output); + break; + case Mortal128: + (value as Mortal128).encodeTo(output); + break; + case Mortal129: + (value as Mortal129).encodeTo(output); + break; + case Mortal130: + (value as Mortal130).encodeTo(output); + break; + case Mortal131: + (value as Mortal131).encodeTo(output); + break; + case Mortal132: + (value as Mortal132).encodeTo(output); + break; + case Mortal133: + (value as Mortal133).encodeTo(output); + break; + case Mortal134: + (value as Mortal134).encodeTo(output); + break; + case Mortal135: + (value as Mortal135).encodeTo(output); + break; + case Mortal136: + (value as Mortal136).encodeTo(output); + break; + case Mortal137: + (value as Mortal137).encodeTo(output); + break; + case Mortal138: + (value as Mortal138).encodeTo(output); + break; + case Mortal139: + (value as Mortal139).encodeTo(output); + break; + case Mortal140: + (value as Mortal140).encodeTo(output); + break; + case Mortal141: + (value as Mortal141).encodeTo(output); + break; + case Mortal142: + (value as Mortal142).encodeTo(output); + break; + case Mortal143: + (value as Mortal143).encodeTo(output); + break; + case Mortal144: + (value as 
Mortal144).encodeTo(output); + break; + case Mortal145: + (value as Mortal145).encodeTo(output); + break; + case Mortal146: + (value as Mortal146).encodeTo(output); + break; + case Mortal147: + (value as Mortal147).encodeTo(output); + break; + case Mortal148: + (value as Mortal148).encodeTo(output); + break; + case Mortal149: + (value as Mortal149).encodeTo(output); + break; + case Mortal150: + (value as Mortal150).encodeTo(output); + break; + case Mortal151: + (value as Mortal151).encodeTo(output); + break; + case Mortal152: + (value as Mortal152).encodeTo(output); + break; + case Mortal153: + (value as Mortal153).encodeTo(output); + break; + case Mortal154: + (value as Mortal154).encodeTo(output); + break; + case Mortal155: + (value as Mortal155).encodeTo(output); + break; + case Mortal156: + (value as Mortal156).encodeTo(output); + break; + case Mortal157: + (value as Mortal157).encodeTo(output); + break; + case Mortal158: + (value as Mortal158).encodeTo(output); + break; + case Mortal159: + (value as Mortal159).encodeTo(output); + break; + case Mortal160: + (value as Mortal160).encodeTo(output); + break; + case Mortal161: + (value as Mortal161).encodeTo(output); + break; + case Mortal162: + (value as Mortal162).encodeTo(output); + break; + case Mortal163: + (value as Mortal163).encodeTo(output); + break; + case Mortal164: + (value as Mortal164).encodeTo(output); + break; + case Mortal165: + (value as Mortal165).encodeTo(output); + break; + case Mortal166: + (value as Mortal166).encodeTo(output); + break; + case Mortal167: + (value as Mortal167).encodeTo(output); + break; + case Mortal168: + (value as Mortal168).encodeTo(output); + break; + case Mortal169: + (value as Mortal169).encodeTo(output); + break; + case Mortal170: + (value as Mortal170).encodeTo(output); + break; + case Mortal171: + (value as Mortal171).encodeTo(output); + break; + case Mortal172: + (value as Mortal172).encodeTo(output); + break; + case Mortal173: + (value as 
Mortal173).encodeTo(output); + break; + case Mortal174: + (value as Mortal174).encodeTo(output); + break; + case Mortal175: + (value as Mortal175).encodeTo(output); + break; + case Mortal176: + (value as Mortal176).encodeTo(output); + break; + case Mortal177: + (value as Mortal177).encodeTo(output); + break; + case Mortal178: + (value as Mortal178).encodeTo(output); + break; + case Mortal179: + (value as Mortal179).encodeTo(output); + break; + case Mortal180: + (value as Mortal180).encodeTo(output); + break; + case Mortal181: + (value as Mortal181).encodeTo(output); + break; + case Mortal182: + (value as Mortal182).encodeTo(output); + break; + case Mortal183: + (value as Mortal183).encodeTo(output); + break; + case Mortal184: + (value as Mortal184).encodeTo(output); + break; + case Mortal185: + (value as Mortal185).encodeTo(output); + break; + case Mortal186: + (value as Mortal186).encodeTo(output); + break; + case Mortal187: + (value as Mortal187).encodeTo(output); + break; + case Mortal188: + (value as Mortal188).encodeTo(output); + break; + case Mortal189: + (value as Mortal189).encodeTo(output); + break; + case Mortal190: + (value as Mortal190).encodeTo(output); + break; + case Mortal191: + (value as Mortal191).encodeTo(output); + break; + case Mortal192: + (value as Mortal192).encodeTo(output); + break; + case Mortal193: + (value as Mortal193).encodeTo(output); + break; + case Mortal194: + (value as Mortal194).encodeTo(output); + break; + case Mortal195: + (value as Mortal195).encodeTo(output); + break; + case Mortal196: + (value as Mortal196).encodeTo(output); + break; + case Mortal197: + (value as Mortal197).encodeTo(output); + break; + case Mortal198: + (value as Mortal198).encodeTo(output); + break; + case Mortal199: + (value as Mortal199).encodeTo(output); + break; + case Mortal200: + (value as Mortal200).encodeTo(output); + break; + case Mortal201: + (value as Mortal201).encodeTo(output); + break; + case Mortal202: + (value as 
Mortal202).encodeTo(output); + break; + case Mortal203: + (value as Mortal203).encodeTo(output); + break; + case Mortal204: + (value as Mortal204).encodeTo(output); + break; + case Mortal205: + (value as Mortal205).encodeTo(output); + break; + case Mortal206: + (value as Mortal206).encodeTo(output); + break; + case Mortal207: + (value as Mortal207).encodeTo(output); + break; + case Mortal208: + (value as Mortal208).encodeTo(output); + break; + case Mortal209: + (value as Mortal209).encodeTo(output); + break; + case Mortal210: + (value as Mortal210).encodeTo(output); + break; + case Mortal211: + (value as Mortal211).encodeTo(output); + break; + case Mortal212: + (value as Mortal212).encodeTo(output); + break; + case Mortal213: + (value as Mortal213).encodeTo(output); + break; + case Mortal214: + (value as Mortal214).encodeTo(output); + break; + case Mortal215: + (value as Mortal215).encodeTo(output); + break; + case Mortal216: + (value as Mortal216).encodeTo(output); + break; + case Mortal217: + (value as Mortal217).encodeTo(output); + break; + case Mortal218: + (value as Mortal218).encodeTo(output); + break; + case Mortal219: + (value as Mortal219).encodeTo(output); + break; + case Mortal220: + (value as Mortal220).encodeTo(output); + break; + case Mortal221: + (value as Mortal221).encodeTo(output); + break; + case Mortal222: + (value as Mortal222).encodeTo(output); + break; + case Mortal223: + (value as Mortal223).encodeTo(output); + break; + case Mortal224: + (value as Mortal224).encodeTo(output); + break; + case Mortal225: + (value as Mortal225).encodeTo(output); + break; + case Mortal226: + (value as Mortal226).encodeTo(output); + break; + case Mortal227: + (value as Mortal227).encodeTo(output); + break; + case Mortal228: + (value as Mortal228).encodeTo(output); + break; + case Mortal229: + (value as Mortal229).encodeTo(output); + break; + case Mortal230: + (value as Mortal230).encodeTo(output); + break; + case Mortal231: + (value as 
Mortal231).encodeTo(output); + break; + case Mortal232: + (value as Mortal232).encodeTo(output); + break; + case Mortal233: + (value as Mortal233).encodeTo(output); + break; + case Mortal234: + (value as Mortal234).encodeTo(output); + break; + case Mortal235: + (value as Mortal235).encodeTo(output); + break; + case Mortal236: + (value as Mortal236).encodeTo(output); + break; + case Mortal237: + (value as Mortal237).encodeTo(output); + break; + case Mortal238: + (value as Mortal238).encodeTo(output); + break; + case Mortal239: + (value as Mortal239).encodeTo(output); + break; + case Mortal240: + (value as Mortal240).encodeTo(output); + break; + case Mortal241: + (value as Mortal241).encodeTo(output); + break; + case Mortal242: + (value as Mortal242).encodeTo(output); + break; + case Mortal243: + (value as Mortal243).encodeTo(output); + break; + case Mortal244: + (value as Mortal244).encodeTo(output); + break; + case Mortal245: + (value as Mortal245).encodeTo(output); + break; + case Mortal246: + (value as Mortal246).encodeTo(output); + break; + case Mortal247: + (value as Mortal247).encodeTo(output); + break; + case Mortal248: + (value as Mortal248).encodeTo(output); + break; + case Mortal249: + (value as Mortal249).encodeTo(output); + break; + case Mortal250: + (value as Mortal250).encodeTo(output); + break; + case Mortal251: + (value as Mortal251).encodeTo(output); + break; + case Mortal252: + (value as Mortal252).encodeTo(output); + break; + case Mortal253: + (value as Mortal253).encodeTo(output); + break; + case Mortal254: + (value as Mortal254).encodeTo(output); + break; + case Mortal255: + (value as Mortal255).encodeTo(output); + break; + default: + throw Exception('Era: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(Era value) { + switch (value.runtimeType) { + case Immortal: + return 1; + case Mortal1: + return (value as Mortal1)._sizeHint(); + case Mortal2: + return (value as Mortal2)._sizeHint(); + case Mortal3: 
+ return (value as Mortal3)._sizeHint(); + case Mortal4: + return (value as Mortal4)._sizeHint(); + case Mortal5: + return (value as Mortal5)._sizeHint(); + case Mortal6: + return (value as Mortal6)._sizeHint(); + case Mortal7: + return (value as Mortal7)._sizeHint(); + case Mortal8: + return (value as Mortal8)._sizeHint(); + case Mortal9: + return (value as Mortal9)._sizeHint(); + case Mortal10: + return (value as Mortal10)._sizeHint(); + case Mortal11: + return (value as Mortal11)._sizeHint(); + case Mortal12: + return (value as Mortal12)._sizeHint(); + case Mortal13: + return (value as Mortal13)._sizeHint(); + case Mortal14: + return (value as Mortal14)._sizeHint(); + case Mortal15: + return (value as Mortal15)._sizeHint(); + case Mortal16: + return (value as Mortal16)._sizeHint(); + case Mortal17: + return (value as Mortal17)._sizeHint(); + case Mortal18: + return (value as Mortal18)._sizeHint(); + case Mortal19: + return (value as Mortal19)._sizeHint(); + case Mortal20: + return (value as Mortal20)._sizeHint(); + case Mortal21: + return (value as Mortal21)._sizeHint(); + case Mortal22: + return (value as Mortal22)._sizeHint(); + case Mortal23: + return (value as Mortal23)._sizeHint(); + case Mortal24: + return (value as Mortal24)._sizeHint(); + case Mortal25: + return (value as Mortal25)._sizeHint(); + case Mortal26: + return (value as Mortal26)._sizeHint(); + case Mortal27: + return (value as Mortal27)._sizeHint(); + case Mortal28: + return (value as Mortal28)._sizeHint(); + case Mortal29: + return (value as Mortal29)._sizeHint(); + case Mortal30: + return (value as Mortal30)._sizeHint(); + case Mortal31: + return (value as Mortal31)._sizeHint(); + case Mortal32: + return (value as Mortal32)._sizeHint(); + case Mortal33: + return (value as Mortal33)._sizeHint(); + case Mortal34: + return (value as Mortal34)._sizeHint(); + case Mortal35: + return (value as Mortal35)._sizeHint(); + case Mortal36: + return (value as Mortal36)._sizeHint(); + case Mortal37: + 
return (value as Mortal37)._sizeHint(); + case Mortal38: + return (value as Mortal38)._sizeHint(); + case Mortal39: + return (value as Mortal39)._sizeHint(); + case Mortal40: + return (value as Mortal40)._sizeHint(); + case Mortal41: + return (value as Mortal41)._sizeHint(); + case Mortal42: + return (value as Mortal42)._sizeHint(); + case Mortal43: + return (value as Mortal43)._sizeHint(); + case Mortal44: + return (value as Mortal44)._sizeHint(); + case Mortal45: + return (value as Mortal45)._sizeHint(); + case Mortal46: + return (value as Mortal46)._sizeHint(); + case Mortal47: + return (value as Mortal47)._sizeHint(); + case Mortal48: + return (value as Mortal48)._sizeHint(); + case Mortal49: + return (value as Mortal49)._sizeHint(); + case Mortal50: + return (value as Mortal50)._sizeHint(); + case Mortal51: + return (value as Mortal51)._sizeHint(); + case Mortal52: + return (value as Mortal52)._sizeHint(); + case Mortal53: + return (value as Mortal53)._sizeHint(); + case Mortal54: + return (value as Mortal54)._sizeHint(); + case Mortal55: + return (value as Mortal55)._sizeHint(); + case Mortal56: + return (value as Mortal56)._sizeHint(); + case Mortal57: + return (value as Mortal57)._sizeHint(); + case Mortal58: + return (value as Mortal58)._sizeHint(); + case Mortal59: + return (value as Mortal59)._sizeHint(); + case Mortal60: + return (value as Mortal60)._sizeHint(); + case Mortal61: + return (value as Mortal61)._sizeHint(); + case Mortal62: + return (value as Mortal62)._sizeHint(); + case Mortal63: + return (value as Mortal63)._sizeHint(); + case Mortal64: + return (value as Mortal64)._sizeHint(); + case Mortal65: + return (value as Mortal65)._sizeHint(); + case Mortal66: + return (value as Mortal66)._sizeHint(); + case Mortal67: + return (value as Mortal67)._sizeHint(); + case Mortal68: + return (value as Mortal68)._sizeHint(); + case Mortal69: + return (value as Mortal69)._sizeHint(); + case Mortal70: + return (value as Mortal70)._sizeHint(); + case 
Mortal71: + return (value as Mortal71)._sizeHint(); + case Mortal72: + return (value as Mortal72)._sizeHint(); + case Mortal73: + return (value as Mortal73)._sizeHint(); + case Mortal74: + return (value as Mortal74)._sizeHint(); + case Mortal75: + return (value as Mortal75)._sizeHint(); + case Mortal76: + return (value as Mortal76)._sizeHint(); + case Mortal77: + return (value as Mortal77)._sizeHint(); + case Mortal78: + return (value as Mortal78)._sizeHint(); + case Mortal79: + return (value as Mortal79)._sizeHint(); + case Mortal80: + return (value as Mortal80)._sizeHint(); + case Mortal81: + return (value as Mortal81)._sizeHint(); + case Mortal82: + return (value as Mortal82)._sizeHint(); + case Mortal83: + return (value as Mortal83)._sizeHint(); + case Mortal84: + return (value as Mortal84)._sizeHint(); + case Mortal85: + return (value as Mortal85)._sizeHint(); + case Mortal86: + return (value as Mortal86)._sizeHint(); + case Mortal87: + return (value as Mortal87)._sizeHint(); + case Mortal88: + return (value as Mortal88)._sizeHint(); + case Mortal89: + return (value as Mortal89)._sizeHint(); + case Mortal90: + return (value as Mortal90)._sizeHint(); + case Mortal91: + return (value as Mortal91)._sizeHint(); + case Mortal92: + return (value as Mortal92)._sizeHint(); + case Mortal93: + return (value as Mortal93)._sizeHint(); + case Mortal94: + return (value as Mortal94)._sizeHint(); + case Mortal95: + return (value as Mortal95)._sizeHint(); + case Mortal96: + return (value as Mortal96)._sizeHint(); + case Mortal97: + return (value as Mortal97)._sizeHint(); + case Mortal98: + return (value as Mortal98)._sizeHint(); + case Mortal99: + return (value as Mortal99)._sizeHint(); + case Mortal100: + return (value as Mortal100)._sizeHint(); + case Mortal101: + return (value as Mortal101)._sizeHint(); + case Mortal102: + return (value as Mortal102)._sizeHint(); + case Mortal103: + return (value as Mortal103)._sizeHint(); + case Mortal104: + return (value as 
Mortal104)._sizeHint(); + case Mortal105: + return (value as Mortal105)._sizeHint(); + case Mortal106: + return (value as Mortal106)._sizeHint(); + case Mortal107: + return (value as Mortal107)._sizeHint(); + case Mortal108: + return (value as Mortal108)._sizeHint(); + case Mortal109: + return (value as Mortal109)._sizeHint(); + case Mortal110: + return (value as Mortal110)._sizeHint(); + case Mortal111: + return (value as Mortal111)._sizeHint(); + case Mortal112: + return (value as Mortal112)._sizeHint(); + case Mortal113: + return (value as Mortal113)._sizeHint(); + case Mortal114: + return (value as Mortal114)._sizeHint(); + case Mortal115: + return (value as Mortal115)._sizeHint(); + case Mortal116: + return (value as Mortal116)._sizeHint(); + case Mortal117: + return (value as Mortal117)._sizeHint(); + case Mortal118: + return (value as Mortal118)._sizeHint(); + case Mortal119: + return (value as Mortal119)._sizeHint(); + case Mortal120: + return (value as Mortal120)._sizeHint(); + case Mortal121: + return (value as Mortal121)._sizeHint(); + case Mortal122: + return (value as Mortal122)._sizeHint(); + case Mortal123: + return (value as Mortal123)._sizeHint(); + case Mortal124: + return (value as Mortal124)._sizeHint(); + case Mortal125: + return (value as Mortal125)._sizeHint(); + case Mortal126: + return (value as Mortal126)._sizeHint(); + case Mortal127: + return (value as Mortal127)._sizeHint(); + case Mortal128: + return (value as Mortal128)._sizeHint(); + case Mortal129: + return (value as Mortal129)._sizeHint(); + case Mortal130: + return (value as Mortal130)._sizeHint(); + case Mortal131: + return (value as Mortal131)._sizeHint(); + case Mortal132: + return (value as Mortal132)._sizeHint(); + case Mortal133: + return (value as Mortal133)._sizeHint(); + case Mortal134: + return (value as Mortal134)._sizeHint(); + case Mortal135: + return (value as Mortal135)._sizeHint(); + case Mortal136: + return (value as Mortal136)._sizeHint(); + case Mortal137: + 
return (value as Mortal137)._sizeHint(); + case Mortal138: + return (value as Mortal138)._sizeHint(); + case Mortal139: + return (value as Mortal139)._sizeHint(); + case Mortal140: + return (value as Mortal140)._sizeHint(); + case Mortal141: + return (value as Mortal141)._sizeHint(); + case Mortal142: + return (value as Mortal142)._sizeHint(); + case Mortal143: + return (value as Mortal143)._sizeHint(); + case Mortal144: + return (value as Mortal144)._sizeHint(); + case Mortal145: + return (value as Mortal145)._sizeHint(); + case Mortal146: + return (value as Mortal146)._sizeHint(); + case Mortal147: + return (value as Mortal147)._sizeHint(); + case Mortal148: + return (value as Mortal148)._sizeHint(); + case Mortal149: + return (value as Mortal149)._sizeHint(); + case Mortal150: + return (value as Mortal150)._sizeHint(); + case Mortal151: + return (value as Mortal151)._sizeHint(); + case Mortal152: + return (value as Mortal152)._sizeHint(); + case Mortal153: + return (value as Mortal153)._sizeHint(); + case Mortal154: + return (value as Mortal154)._sizeHint(); + case Mortal155: + return (value as Mortal155)._sizeHint(); + case Mortal156: + return (value as Mortal156)._sizeHint(); + case Mortal157: + return (value as Mortal157)._sizeHint(); + case Mortal158: + return (value as Mortal158)._sizeHint(); + case Mortal159: + return (value as Mortal159)._sizeHint(); + case Mortal160: + return (value as Mortal160)._sizeHint(); + case Mortal161: + return (value as Mortal161)._sizeHint(); + case Mortal162: + return (value as Mortal162)._sizeHint(); + case Mortal163: + return (value as Mortal163)._sizeHint(); + case Mortal164: + return (value as Mortal164)._sizeHint(); + case Mortal165: + return (value as Mortal165)._sizeHint(); + case Mortal166: + return (value as Mortal166)._sizeHint(); + case Mortal167: + return (value as Mortal167)._sizeHint(); + case Mortal168: + return (value as Mortal168)._sizeHint(); + case Mortal169: + return (value as Mortal169)._sizeHint(); + case 
Mortal170: + return (value as Mortal170)._sizeHint(); + case Mortal171: + return (value as Mortal171)._sizeHint(); + case Mortal172: + return (value as Mortal172)._sizeHint(); + case Mortal173: + return (value as Mortal173)._sizeHint(); + case Mortal174: + return (value as Mortal174)._sizeHint(); + case Mortal175: + return (value as Mortal175)._sizeHint(); + case Mortal176: + return (value as Mortal176)._sizeHint(); + case Mortal177: + return (value as Mortal177)._sizeHint(); + case Mortal178: + return (value as Mortal178)._sizeHint(); + case Mortal179: + return (value as Mortal179)._sizeHint(); + case Mortal180: + return (value as Mortal180)._sizeHint(); + case Mortal181: + return (value as Mortal181)._sizeHint(); + case Mortal182: + return (value as Mortal182)._sizeHint(); + case Mortal183: + return (value as Mortal183)._sizeHint(); + case Mortal184: + return (value as Mortal184)._sizeHint(); + case Mortal185: + return (value as Mortal185)._sizeHint(); + case Mortal186: + return (value as Mortal186)._sizeHint(); + case Mortal187: + return (value as Mortal187)._sizeHint(); + case Mortal188: + return (value as Mortal188)._sizeHint(); + case Mortal189: + return (value as Mortal189)._sizeHint(); + case Mortal190: + return (value as Mortal190)._sizeHint(); + case Mortal191: + return (value as Mortal191)._sizeHint(); + case Mortal192: + return (value as Mortal192)._sizeHint(); + case Mortal193: + return (value as Mortal193)._sizeHint(); + case Mortal194: + return (value as Mortal194)._sizeHint(); + case Mortal195: + return (value as Mortal195)._sizeHint(); + case Mortal196: + return (value as Mortal196)._sizeHint(); + case Mortal197: + return (value as Mortal197)._sizeHint(); + case Mortal198: + return (value as Mortal198)._sizeHint(); + case Mortal199: + return (value as Mortal199)._sizeHint(); + case Mortal200: + return (value as Mortal200)._sizeHint(); + case Mortal201: + return (value as Mortal201)._sizeHint(); + case Mortal202: + return (value as 
Mortal202)._sizeHint(); + case Mortal203: + return (value as Mortal203)._sizeHint(); + case Mortal204: + return (value as Mortal204)._sizeHint(); + case Mortal205: + return (value as Mortal205)._sizeHint(); + case Mortal206: + return (value as Mortal206)._sizeHint(); + case Mortal207: + return (value as Mortal207)._sizeHint(); + case Mortal208: + return (value as Mortal208)._sizeHint(); + case Mortal209: + return (value as Mortal209)._sizeHint(); + case Mortal210: + return (value as Mortal210)._sizeHint(); + case Mortal211: + return (value as Mortal211)._sizeHint(); + case Mortal212: + return (value as Mortal212)._sizeHint(); + case Mortal213: + return (value as Mortal213)._sizeHint(); + case Mortal214: + return (value as Mortal214)._sizeHint(); + case Mortal215: + return (value as Mortal215)._sizeHint(); + case Mortal216: + return (value as Mortal216)._sizeHint(); + case Mortal217: + return (value as Mortal217)._sizeHint(); + case Mortal218: + return (value as Mortal218)._sizeHint(); + case Mortal219: + return (value as Mortal219)._sizeHint(); + case Mortal220: + return (value as Mortal220)._sizeHint(); + case Mortal221: + return (value as Mortal221)._sizeHint(); + case Mortal222: + return (value as Mortal222)._sizeHint(); + case Mortal223: + return (value as Mortal223)._sizeHint(); + case Mortal224: + return (value as Mortal224)._sizeHint(); + case Mortal225: + return (value as Mortal225)._sizeHint(); + case Mortal226: + return (value as Mortal226)._sizeHint(); + case Mortal227: + return (value as Mortal227)._sizeHint(); + case Mortal228: + return (value as Mortal228)._sizeHint(); + case Mortal229: + return (value as Mortal229)._sizeHint(); + case Mortal230: + return (value as Mortal230)._sizeHint(); + case Mortal231: + return (value as Mortal231)._sizeHint(); + case Mortal232: + return (value as Mortal232)._sizeHint(); + case Mortal233: + return (value as Mortal233)._sizeHint(); + case Mortal234: + return (value as Mortal234)._sizeHint(); + case Mortal235: + 
return (value as Mortal235)._sizeHint(); + case Mortal236: + return (value as Mortal236)._sizeHint(); + case Mortal237: + return (value as Mortal237)._sizeHint(); + case Mortal238: + return (value as Mortal238)._sizeHint(); + case Mortal239: + return (value as Mortal239)._sizeHint(); + case Mortal240: + return (value as Mortal240)._sizeHint(); + case Mortal241: + return (value as Mortal241)._sizeHint(); + case Mortal242: + return (value as Mortal242)._sizeHint(); + case Mortal243: + return (value as Mortal243)._sizeHint(); + case Mortal244: + return (value as Mortal244)._sizeHint(); + case Mortal245: + return (value as Mortal245)._sizeHint(); + case Mortal246: + return (value as Mortal246)._sizeHint(); + case Mortal247: + return (value as Mortal247)._sizeHint(); + case Mortal248: + return (value as Mortal248)._sizeHint(); + case Mortal249: + return (value as Mortal249)._sizeHint(); + case Mortal250: + return (value as Mortal250)._sizeHint(); + case Mortal251: + return (value as Mortal251)._sizeHint(); + case Mortal252: + return (value as Mortal252)._sizeHint(); + case Mortal253: + return (value as Mortal253)._sizeHint(); + case Mortal254: + return (value as Mortal254)._sizeHint(); + case Mortal255: + return (value as Mortal255)._sizeHint(); + default: + throw Exception('Era: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Immortal extends Era { + const Immortal(); + + @override + Map toJson() => {'Immortal': null}; + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + } + + @override + bool operator ==(Object other) => other is Immortal; + + @override + int get hashCode => runtimeType.hashCode; +} + +class Mortal1 extends Era { + const Mortal1(this.value0); + + factory Mortal1._decode(_i1.Input input) { + return Mortal1(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal1': value0}; + + int _sizeHint() { + int size = 1; + size = size + 
_i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal1 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal2 extends Era { + const Mortal2(this.value0); + + factory Mortal2._decode(_i1.Input input) { + return Mortal2(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal2': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal2 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal3 extends Era { + const Mortal3(this.value0); + + factory Mortal3._decode(_i1.Input input) { + return Mortal3(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal3': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal3 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal4 extends Era { + const Mortal4(this.value0); + + factory Mortal4._decode(_i1.Input input) { + return Mortal4(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal4': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + 
return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal4 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal5 extends Era { + const Mortal5(this.value0); + + factory Mortal5._decode(_i1.Input input) { + return Mortal5(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal5': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(5, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal5 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal6 extends Era { + const Mortal6(this.value0); + + factory Mortal6._decode(_i1.Input input) { + return Mortal6(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal6': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(6, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal6 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal7 extends Era { + const Mortal7(this.value0); + + factory Mortal7._decode(_i1.Input input) { + return Mortal7(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal7': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void 
encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(7, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal7 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal8 extends Era { + const Mortal8(this.value0); + + factory Mortal8._decode(_i1.Input input) { + return Mortal8(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal8': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(8, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal8 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal9 extends Era { + const Mortal9(this.value0); + + factory Mortal9._decode(_i1.Input input) { + return Mortal9(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal9': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(9, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal9 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal10 extends Era { + const Mortal10(this.value0); + + factory Mortal10._decode(_i1.Input input) { + return Mortal10(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal10': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(10, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal10 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal11 extends Era { + const Mortal11(this.value0); + + factory Mortal11._decode(_i1.Input input) { + return Mortal11(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal11': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(11, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal11 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal12 extends Era { + const Mortal12(this.value0); + + factory Mortal12._decode(_i1.Input input) { + return Mortal12(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal12': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(12, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal12 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal13 extends Era { + const Mortal13(this.value0); + + factory Mortal13._decode(_i1.Input input) { + return Mortal13(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal13': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(13, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal13 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal14 extends Era { + const Mortal14(this.value0); + + factory Mortal14._decode(_i1.Input input) { + return Mortal14(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal14': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(14, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal14 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal15 extends Era { + const Mortal15(this.value0); + + factory Mortal15._decode(_i1.Input input) { + return Mortal15(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal15': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(15, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal15 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal16 extends Era { + const Mortal16(this.value0); + + factory Mortal16._decode(_i1.Input input) { + return Mortal16(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal16': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(16, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal16 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal17 extends Era { + const Mortal17(this.value0); + + factory Mortal17._decode(_i1.Input input) { + return Mortal17(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal17': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(17, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal17 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal18 extends Era { + const Mortal18(this.value0); + + factory Mortal18._decode(_i1.Input input) { + return Mortal18(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal18': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(18, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal18 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal19 extends Era { + const Mortal19(this.value0); + + factory Mortal19._decode(_i1.Input input) { + return Mortal19(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal19': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(19, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal19 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal20 extends Era { + const Mortal20(this.value0); + + factory Mortal20._decode(_i1.Input input) { + return Mortal20(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal20': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(20, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal20 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal21 extends Era { + const Mortal21(this.value0); + + factory Mortal21._decode(_i1.Input input) { + return Mortal21(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal21': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(21, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal21 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal22 extends Era { + const Mortal22(this.value0); + + factory Mortal22._decode(_i1.Input input) { + return Mortal22(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal22': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(22, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal22 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal23 extends Era { + const Mortal23(this.value0); + + factory Mortal23._decode(_i1.Input input) { + return Mortal23(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal23': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(23, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal23 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal24 extends Era { + const Mortal24(this.value0); + + factory Mortal24._decode(_i1.Input input) { + return Mortal24(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal24': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(24, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal24 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal25 extends Era { + const Mortal25(this.value0); + + factory Mortal25._decode(_i1.Input input) { + return Mortal25(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal25': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(25, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal25 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal26 extends Era { + const Mortal26(this.value0); + + factory Mortal26._decode(_i1.Input input) { + return Mortal26(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal26': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(26, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal26 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal27 extends Era { + const Mortal27(this.value0); + + factory Mortal27._decode(_i1.Input input) { + return Mortal27(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal27': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(27, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal27 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal28 extends Era { + const Mortal28(this.value0); + + factory Mortal28._decode(_i1.Input input) { + return Mortal28(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal28': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(28, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal28 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal29 extends Era { + const Mortal29(this.value0); + + factory Mortal29._decode(_i1.Input input) { + return Mortal29(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal29': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(29, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal29 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal30 extends Era { + const Mortal30(this.value0); + + factory Mortal30._decode(_i1.Input input) { + return Mortal30(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal30': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(30, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal30 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal31 extends Era { + const Mortal31(this.value0); + + factory Mortal31._decode(_i1.Input input) { + return Mortal31(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal31': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(31, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal31 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal32 extends Era { + const Mortal32(this.value0); + + factory Mortal32._decode(_i1.Input input) { + return Mortal32(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal32': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(32, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal32 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal33 extends Era { + const Mortal33(this.value0); + + factory Mortal33._decode(_i1.Input input) { + return Mortal33(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal33': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(33, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal33 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal34 extends Era { + const Mortal34(this.value0); + + factory Mortal34._decode(_i1.Input input) { + return Mortal34(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal34': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(34, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal34 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal35 extends Era { + const Mortal35(this.value0); + + factory Mortal35._decode(_i1.Input input) { + return Mortal35(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal35': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(35, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal35 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal36 extends Era { + const Mortal36(this.value0); + + factory Mortal36._decode(_i1.Input input) { + return Mortal36(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal36': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(36, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal36 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal37 extends Era { + const Mortal37(this.value0); + + factory Mortal37._decode(_i1.Input input) { + return Mortal37(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal37': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(37, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal37 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal38 extends Era { + const Mortal38(this.value0); + + factory Mortal38._decode(_i1.Input input) { + return Mortal38(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal38': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(38, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal38 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal39 extends Era { + const Mortal39(this.value0); + + factory Mortal39._decode(_i1.Input input) { + return Mortal39(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal39': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(39, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal39 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal40 extends Era { + const Mortal40(this.value0); + + factory Mortal40._decode(_i1.Input input) { + return Mortal40(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal40': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(40, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal40 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal41 extends Era { + const Mortal41(this.value0); + + factory Mortal41._decode(_i1.Input input) { + return Mortal41(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal41': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(41, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal41 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal42 extends Era { + const Mortal42(this.value0); + + factory Mortal42._decode(_i1.Input input) { + return Mortal42(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal42': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(42, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal42 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal43 extends Era { + const Mortal43(this.value0); + + factory Mortal43._decode(_i1.Input input) { + return Mortal43(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal43': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(43, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal43 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal44 extends Era { + const Mortal44(this.value0); + + factory Mortal44._decode(_i1.Input input) { + return Mortal44(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal44': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(44, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal44 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal45 extends Era { + const Mortal45(this.value0); + + factory Mortal45._decode(_i1.Input input) { + return Mortal45(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal45': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(45, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal45 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal46 extends Era { + const Mortal46(this.value0); + + factory Mortal46._decode(_i1.Input input) { + return Mortal46(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal46': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(46, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal46 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal47 extends Era { + const Mortal47(this.value0); + + factory Mortal47._decode(_i1.Input input) { + return Mortal47(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal47': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(47, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal47 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal48 extends Era { + const Mortal48(this.value0); + + factory Mortal48._decode(_i1.Input input) { + return Mortal48(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal48': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(48, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal48 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal49 extends Era { + const Mortal49(this.value0); + + factory Mortal49._decode(_i1.Input input) { + return Mortal49(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal49': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(49, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal49 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal50 extends Era { + const Mortal50(this.value0); + + factory Mortal50._decode(_i1.Input input) { + return Mortal50(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal50': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(50, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal50 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal51 extends Era { + const Mortal51(this.value0); + + factory Mortal51._decode(_i1.Input input) { + return Mortal51(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal51': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(51, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal51 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal52 extends Era { + const Mortal52(this.value0); + + factory Mortal52._decode(_i1.Input input) { + return Mortal52(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal52': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(52, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal52 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal53 extends Era { + const Mortal53(this.value0); + + factory Mortal53._decode(_i1.Input input) { + return Mortal53(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal53': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(53, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal53 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal54 extends Era { + const Mortal54(this.value0); + + factory Mortal54._decode(_i1.Input input) { + return Mortal54(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal54': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(54, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal54 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal55 extends Era { + const Mortal55(this.value0); + + factory Mortal55._decode(_i1.Input input) { + return Mortal55(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal55': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(55, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal55 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal56 extends Era { + const Mortal56(this.value0); + + factory Mortal56._decode(_i1.Input input) { + return Mortal56(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal56': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(56, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal56 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal57 extends Era { + const Mortal57(this.value0); + + factory Mortal57._decode(_i1.Input input) { + return Mortal57(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal57': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(57, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal57 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal58 extends Era { + const Mortal58(this.value0); + + factory Mortal58._decode(_i1.Input input) { + return Mortal58(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal58': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(58, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal58 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal59 extends Era { + const Mortal59(this.value0); + + factory Mortal59._decode(_i1.Input input) { + return Mortal59(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal59': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(59, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal59 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal60 extends Era { + const Mortal60(this.value0); + + factory Mortal60._decode(_i1.Input input) { + return Mortal60(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal60': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(60, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal60 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal61 extends Era { + const Mortal61(this.value0); + + factory Mortal61._decode(_i1.Input input) { + return Mortal61(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal61': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(61, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal61 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal62 extends Era { + const Mortal62(this.value0); + + factory Mortal62._decode(_i1.Input input) { + return Mortal62(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal62': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(62, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal62 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal63 extends Era { + const Mortal63(this.value0); + + factory Mortal63._decode(_i1.Input input) { + return Mortal63(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal63': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(63, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal63 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal64 extends Era { + const Mortal64(this.value0); + + factory Mortal64._decode(_i1.Input input) { + return Mortal64(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal64': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(64, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal64 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal65 extends Era { + const Mortal65(this.value0); + + factory Mortal65._decode(_i1.Input input) { + return Mortal65(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal65': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(65, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal65 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal66 extends Era { + const Mortal66(this.value0); + + factory Mortal66._decode(_i1.Input input) { + return Mortal66(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal66': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(66, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal66 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal67 extends Era { + const Mortal67(this.value0); + + factory Mortal67._decode(_i1.Input input) { + return Mortal67(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal67': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(67, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal67 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal68 extends Era { + const Mortal68(this.value0); + + factory Mortal68._decode(_i1.Input input) { + return Mortal68(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal68': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(68, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal68 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal69 extends Era { + const Mortal69(this.value0); + + factory Mortal69._decode(_i1.Input input) { + return Mortal69(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal69': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(69, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal69 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal70 extends Era { + const Mortal70(this.value0); + + factory Mortal70._decode(_i1.Input input) { + return Mortal70(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal70': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(70, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal70 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal71 extends Era { + const Mortal71(this.value0); + + factory Mortal71._decode(_i1.Input input) { + return Mortal71(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal71': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(71, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal71 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal72 extends Era { + const Mortal72(this.value0); + + factory Mortal72._decode(_i1.Input input) { + return Mortal72(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal72': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(72, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal72 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal73 extends Era { + const Mortal73(this.value0); + + factory Mortal73._decode(_i1.Input input) { + return Mortal73(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal73': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(73, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal73 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal74 extends Era { + const Mortal74(this.value0); + + factory Mortal74._decode(_i1.Input input) { + return Mortal74(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal74': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(74, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal74 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal75 extends Era { + const Mortal75(this.value0); + + factory Mortal75._decode(_i1.Input input) { + return Mortal75(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal75': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(75, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal75 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal76 extends Era { + const Mortal76(this.value0); + + factory Mortal76._decode(_i1.Input input) { + return Mortal76(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal76': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(76, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal76 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal77 extends Era { + const Mortal77(this.value0); + + factory Mortal77._decode(_i1.Input input) { + return Mortal77(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal77': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(77, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal77 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal78 extends Era { + const Mortal78(this.value0); + + factory Mortal78._decode(_i1.Input input) { + return Mortal78(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal78': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(78, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal78 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal79 extends Era { + const Mortal79(this.value0); + + factory Mortal79._decode(_i1.Input input) { + return Mortal79(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal79': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(79, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal79 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal80 extends Era { + const Mortal80(this.value0); + + factory Mortal80._decode(_i1.Input input) { + return Mortal80(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal80': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(80, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal80 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal81 extends Era { + const Mortal81(this.value0); + + factory Mortal81._decode(_i1.Input input) { + return Mortal81(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal81': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(81, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal81 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal82 extends Era { + const Mortal82(this.value0); + + factory Mortal82._decode(_i1.Input input) { + return Mortal82(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal82': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(82, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal82 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal83 extends Era { + const Mortal83(this.value0); + + factory Mortal83._decode(_i1.Input input) { + return Mortal83(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal83': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(83, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal83 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal84 extends Era { + const Mortal84(this.value0); + + factory Mortal84._decode(_i1.Input input) { + return Mortal84(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal84': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(84, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal84 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal85 extends Era { + const Mortal85(this.value0); + + factory Mortal85._decode(_i1.Input input) { + return Mortal85(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal85': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(85, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal85 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal86 extends Era { + const Mortal86(this.value0); + + factory Mortal86._decode(_i1.Input input) { + return Mortal86(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal86': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(86, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal86 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal87 extends Era { + const Mortal87(this.value0); + + factory Mortal87._decode(_i1.Input input) { + return Mortal87(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal87': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(87, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal87 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal88 extends Era { + const Mortal88(this.value0); + + factory Mortal88._decode(_i1.Input input) { + return Mortal88(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal88': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(88, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal88 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal89 extends Era { + const Mortal89(this.value0); + + factory Mortal89._decode(_i1.Input input) { + return Mortal89(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal89': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(89, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal89 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal90 extends Era { + const Mortal90(this.value0); + + factory Mortal90._decode(_i1.Input input) { + return Mortal90(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal90': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(90, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal90 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal91 extends Era { + const Mortal91(this.value0); + + factory Mortal91._decode(_i1.Input input) { + return Mortal91(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal91': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(91, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal91 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal92 extends Era { + const Mortal92(this.value0); + + factory Mortal92._decode(_i1.Input input) { + return Mortal92(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal92': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(92, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal92 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal93 extends Era { + const Mortal93(this.value0); + + factory Mortal93._decode(_i1.Input input) { + return Mortal93(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal93': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(93, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal93 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal94 extends Era { + const Mortal94(this.value0); + + factory Mortal94._decode(_i1.Input input) { + return Mortal94(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal94': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(94, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal94 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal95 extends Era { + const Mortal95(this.value0); + + factory Mortal95._decode(_i1.Input input) { + return Mortal95(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal95': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(95, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal95 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal96 extends Era { + const Mortal96(this.value0); + + factory Mortal96._decode(_i1.Input input) { + return Mortal96(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal96': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(96, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal96 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal97 extends Era { + const Mortal97(this.value0); + + factory Mortal97._decode(_i1.Input input) { + return Mortal97(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal97': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(97, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal97 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal98 extends Era { + const Mortal98(this.value0); + + factory Mortal98._decode(_i1.Input input) { + return Mortal98(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal98': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(98, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal98 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal99 extends Era { + const Mortal99(this.value0); + + factory Mortal99._decode(_i1.Input input) { + return Mortal99(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal99': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(99, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal99 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal100 extends Era { + const Mortal100(this.value0); + + factory Mortal100._decode(_i1.Input input) { + return Mortal100(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal100': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + 
_i1.U8Codec.codec.encodeTo(100, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal100 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal101 extends Era { + const Mortal101(this.value0); + + factory Mortal101._decode(_i1.Input input) { + return Mortal101(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal101': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(101, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal101 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal102 extends Era { + const Mortal102(this.value0); + + factory Mortal102._decode(_i1.Input input) { + return Mortal102(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal102': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(102, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal102 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal103 extends Era { + const Mortal103(this.value0); + + factory Mortal103._decode(_i1.Input input) { + return Mortal103(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal103': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { 
+ _i1.U8Codec.codec.encodeTo(103, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal103 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal104 extends Era { + const Mortal104(this.value0); + + factory Mortal104._decode(_i1.Input input) { + return Mortal104(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal104': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(104, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal104 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal105 extends Era { + const Mortal105(this.value0); + + factory Mortal105._decode(_i1.Input input) { + return Mortal105(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal105': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(105, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal105 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal106 extends Era { + const Mortal106(this.value0); + + factory Mortal106._decode(_i1.Input input) { + return Mortal106(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal106': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) 
{ + _i1.U8Codec.codec.encodeTo(106, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal106 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal107 extends Era { + const Mortal107(this.value0); + + factory Mortal107._decode(_i1.Input input) { + return Mortal107(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal107': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(107, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal107 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal108 extends Era { + const Mortal108(this.value0); + + factory Mortal108._decode(_i1.Input input) { + return Mortal108(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal108': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(108, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal108 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal109 extends Era { + const Mortal109(this.value0); + + factory Mortal109._decode(_i1.Input input) { + return Mortal109(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal109': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output 
output) { + _i1.U8Codec.codec.encodeTo(109, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal109 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal110 extends Era { + const Mortal110(this.value0); + + factory Mortal110._decode(_i1.Input input) { + return Mortal110(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal110': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(110, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal110 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal111 extends Era { + const Mortal111(this.value0); + + factory Mortal111._decode(_i1.Input input) { + return Mortal111(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal111': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(111, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal111 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal112 extends Era { + const Mortal112(this.value0); + + factory Mortal112._decode(_i1.Input input) { + return Mortal112(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal112': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void 
encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(112, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal112 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal113 extends Era { + const Mortal113(this.value0); + + factory Mortal113._decode(_i1.Input input) { + return Mortal113(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal113': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(113, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal113 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal114 extends Era { + const Mortal114(this.value0); + + factory Mortal114._decode(_i1.Input input) { + return Mortal114(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal114': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(114, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal114 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal115 extends Era { + const Mortal115(this.value0); + + factory Mortal115._decode(_i1.Input input) { + return Mortal115(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal115': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + 
void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(115, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal115 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal116 extends Era { + const Mortal116(this.value0); + + factory Mortal116._decode(_i1.Input input) { + return Mortal116(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal116': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(116, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal116 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal117 extends Era { + const Mortal117(this.value0); + + factory Mortal117._decode(_i1.Input input) { + return Mortal117(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal117': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(117, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal117 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal118 extends Era { + const Mortal118(this.value0); + + factory Mortal118._decode(_i1.Input input) { + return Mortal118(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal118': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + 
+ void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(118, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal118 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal119 extends Era { + const Mortal119(this.value0); + + factory Mortal119._decode(_i1.Input input) { + return Mortal119(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal119': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(119, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal119 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal120 extends Era { + const Mortal120(this.value0); + + factory Mortal120._decode(_i1.Input input) { + return Mortal120(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal120': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(120, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal120 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal121 extends Era { + const Mortal121(this.value0); + + factory Mortal121._decode(_i1.Input input) { + return Mortal121(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal121': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } 
+ + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(121, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal121 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal122 extends Era { + const Mortal122(this.value0); + + factory Mortal122._decode(_i1.Input input) { + return Mortal122(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal122': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(122, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal122 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal123 extends Era { + const Mortal123(this.value0); + + factory Mortal123._decode(_i1.Input input) { + return Mortal123(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal123': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(123, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal123 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal124 extends Era { + const Mortal124(this.value0); + + factory Mortal124._decode(_i1.Input input) { + return Mortal124(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal124': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + 
} + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(124, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal124 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal125 extends Era { + const Mortal125(this.value0); + + factory Mortal125._decode(_i1.Input input) { + return Mortal125(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal125': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(125, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal125 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal126 extends Era { + const Mortal126(this.value0); + + factory Mortal126._decode(_i1.Input input) { + return Mortal126(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal126': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(126, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal126 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal127 extends Era { + const Mortal127(this.value0); + + factory Mortal127._decode(_i1.Input input) { + return Mortal127(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal127': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; 
+ } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(127, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal127 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal128 extends Era { + const Mortal128(this.value0); + + factory Mortal128._decode(_i1.Input input) { + return Mortal128(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal128': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(128, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal128 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal129 extends Era { + const Mortal129(this.value0); + + factory Mortal129._decode(_i1.Input input) { + return Mortal129(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal129': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(129, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal129 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal130 extends Era { + const Mortal130(this.value0); + + factory Mortal130._decode(_i1.Input input) { + return Mortal130(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal130': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return 
size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(130, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal130 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal131 extends Era { + const Mortal131(this.value0); + + factory Mortal131._decode(_i1.Input input) { + return Mortal131(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal131': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(131, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal131 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal132 extends Era { + const Mortal132(this.value0); + + factory Mortal132._decode(_i1.Input input) { + return Mortal132(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal132': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(132, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal132 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal133 extends Era { + const Mortal133(this.value0); + + factory Mortal133._decode(_i1.Input input) { + return Mortal133(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal133': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + 
return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(133, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal133 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal134 extends Era { + const Mortal134(this.value0); + + factory Mortal134._decode(_i1.Input input) { + return Mortal134(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal134': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(134, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal134 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal135 extends Era { + const Mortal135(this.value0); + + factory Mortal135._decode(_i1.Input input) { + return Mortal135(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal135': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(135, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal135 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal136 extends Era { + const Mortal136(this.value0); + + factory Mortal136._decode(_i1.Input input) { + return Mortal136(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal136': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); 
+ return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(136, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal136 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal137 extends Era { + const Mortal137(this.value0); + + factory Mortal137._decode(_i1.Input input) { + return Mortal137(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal137': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(137, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal137 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal138 extends Era { + const Mortal138(this.value0); + + factory Mortal138._decode(_i1.Input input) { + return Mortal138(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal138': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(138, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal138 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal139 extends Era { + const Mortal139(this.value0); + + factory Mortal139._decode(_i1.Input input) { + return Mortal139(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal139': value0}; + + int _sizeHint() { + int size = 1; + size = size + 
_i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(139, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal139 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal140 extends Era { + const Mortal140(this.value0); + + factory Mortal140._decode(_i1.Input input) { + return Mortal140(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal140': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(140, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal140 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal141 extends Era { + const Mortal141(this.value0); + + factory Mortal141._decode(_i1.Input input) { + return Mortal141(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal141': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(141, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal141 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal142 extends Era { + const Mortal142(this.value0); + + factory Mortal142._decode(_i1.Input input) { + return Mortal142(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal142': value0}; + + int _sizeHint() { + int size = 1; + size = size 
+ _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(142, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal142 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal143 extends Era { + const Mortal143(this.value0); + + factory Mortal143._decode(_i1.Input input) { + return Mortal143(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal143': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(143, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal143 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal144 extends Era { + const Mortal144(this.value0); + + factory Mortal144._decode(_i1.Input input) { + return Mortal144(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal144': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(144, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal144 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal145 extends Era { + const Mortal145(this.value0); + + factory Mortal145._decode(_i1.Input input) { + return Mortal145(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal145': value0}; + + int _sizeHint() { + int size = 1; + size = 
size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(145, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal145 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal146 extends Era { + const Mortal146(this.value0); + + factory Mortal146._decode(_i1.Input input) { + return Mortal146(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal146': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(146, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal146 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal147 extends Era { + const Mortal147(this.value0); + + factory Mortal147._decode(_i1.Input input) { + return Mortal147(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal147': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(147, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal147 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal148 extends Era { + const Mortal148(this.value0); + + factory Mortal148._decode(_i1.Input input) { + return Mortal148(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal148': value0}; + + int _sizeHint() { + int size = 1; + size 
= size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(148, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal148 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal149 extends Era { + const Mortal149(this.value0); + + factory Mortal149._decode(_i1.Input input) { + return Mortal149(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal149': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(149, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal149 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal150 extends Era { + const Mortal150(this.value0); + + factory Mortal150._decode(_i1.Input input) { + return Mortal150(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal150': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(150, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal150 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal151 extends Era { + const Mortal151(this.value0); + + factory Mortal151._decode(_i1.Input input) { + return Mortal151(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal151': value0}; + + int _sizeHint() { + int size = 1; + 
size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(151, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal151 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal152 extends Era { + const Mortal152(this.value0); + + factory Mortal152._decode(_i1.Input input) { + return Mortal152(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal152': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(152, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal152 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal153 extends Era { + const Mortal153(this.value0); + + factory Mortal153._decode(_i1.Input input) { + return Mortal153(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal153': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(153, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal153 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal154 extends Era { + const Mortal154(this.value0); + + factory Mortal154._decode(_i1.Input input) { + return Mortal154(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal154': value0}; + + int _sizeHint() { + int size = 1; 
+ size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(154, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal154 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal155 extends Era { + const Mortal155(this.value0); + + factory Mortal155._decode(_i1.Input input) { + return Mortal155(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal155': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(155, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal155 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal156 extends Era { + const Mortal156(this.value0); + + factory Mortal156._decode(_i1.Input input) { + return Mortal156(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal156': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(156, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal156 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal157 extends Era { + const Mortal157(this.value0); + + factory Mortal157._decode(_i1.Input input) { + return Mortal157(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal157': value0}; + + int _sizeHint() { + int size = 
1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(157, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal157 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal158 extends Era { + const Mortal158(this.value0); + + factory Mortal158._decode(_i1.Input input) { + return Mortal158(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal158': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(158, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal158 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal159 extends Era { + const Mortal159(this.value0); + + factory Mortal159._decode(_i1.Input input) { + return Mortal159(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal159': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(159, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal159 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal160 extends Era { + const Mortal160(this.value0); + + factory Mortal160._decode(_i1.Input input) { + return Mortal160(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal160': value0}; + + int _sizeHint() { + int size 
= 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(160, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal160 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal161 extends Era { + const Mortal161(this.value0); + + factory Mortal161._decode(_i1.Input input) { + return Mortal161(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal161': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(161, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal161 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal162 extends Era { + const Mortal162(this.value0); + + factory Mortal162._decode(_i1.Input input) { + return Mortal162(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal162': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(162, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal162 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal163 extends Era { + const Mortal163(this.value0); + + factory Mortal163._decode(_i1.Input input) { + return Mortal163(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal163': value0}; + + int _sizeHint() { + int 
size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(163, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal163 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal164 extends Era { + const Mortal164(this.value0); + + factory Mortal164._decode(_i1.Input input) { + return Mortal164(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal164': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(164, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal164 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal165 extends Era { + const Mortal165(this.value0); + + factory Mortal165._decode(_i1.Input input) { + return Mortal165(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal165': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(165, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal165 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal166 extends Era { + const Mortal166(this.value0); + + factory Mortal166._decode(_i1.Input input) { + return Mortal166(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal166': value0}; + + int _sizeHint() { + 
int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(166, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal166 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal167 extends Era { + const Mortal167(this.value0); + + factory Mortal167._decode(_i1.Input input) { + return Mortal167(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal167': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(167, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal167 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal168 extends Era { + const Mortal168(this.value0); + + factory Mortal168._decode(_i1.Input input) { + return Mortal168(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal168': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(168, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal168 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal169 extends Era { + const Mortal169(this.value0); + + factory Mortal169._decode(_i1.Input input) { + return Mortal169(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal169': value0}; + + int _sizeHint() { 
+ int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(169, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal169 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal170 extends Era { + const Mortal170(this.value0); + + factory Mortal170._decode(_i1.Input input) { + return Mortal170(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal170': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(170, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal170 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal171 extends Era { + const Mortal171(this.value0); + + factory Mortal171._decode(_i1.Input input) { + return Mortal171(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal171': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(171, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal171 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal172 extends Era { + const Mortal172(this.value0); + + factory Mortal172._decode(_i1.Input input) { + return Mortal172(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal172': value0}; + + int _sizeHint() 
{ + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(172, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal172 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal173 extends Era { + const Mortal173(this.value0); + + factory Mortal173._decode(_i1.Input input) { + return Mortal173(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal173': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(173, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal173 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal174 extends Era { + const Mortal174(this.value0); + + factory Mortal174._decode(_i1.Input input) { + return Mortal174(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal174': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(174, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal174 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal175 extends Era { + const Mortal175(this.value0); + + factory Mortal175._decode(_i1.Input input) { + return Mortal175(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal175': value0}; + + int 
_sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(175, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal175 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal176 extends Era { + const Mortal176(this.value0); + + factory Mortal176._decode(_i1.Input input) { + return Mortal176(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal176': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(176, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal176 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal177 extends Era { + const Mortal177(this.value0); + + factory Mortal177._decode(_i1.Input input) { + return Mortal177(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal177': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(177, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal177 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal178 extends Era { + const Mortal178(this.value0); + + factory Mortal178._decode(_i1.Input input) { + return Mortal178(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal178': value0}; + + 
int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(178, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal178 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal179 extends Era { + const Mortal179(this.value0); + + factory Mortal179._decode(_i1.Input input) { + return Mortal179(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal179': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(179, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal179 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal180 extends Era { + const Mortal180(this.value0); + + factory Mortal180._decode(_i1.Input input) { + return Mortal180(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal180': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(180, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal180 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal181 extends Era { + const Mortal181(this.value0); + + factory Mortal181._decode(_i1.Input input) { + return Mortal181(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal181': value0}; + 
+ int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(181, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal181 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal182 extends Era { + const Mortal182(this.value0); + + factory Mortal182._decode(_i1.Input input) { + return Mortal182(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal182': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(182, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal182 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal183 extends Era { + const Mortal183(this.value0); + + factory Mortal183._decode(_i1.Input input) { + return Mortal183(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal183': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(183, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal183 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal184 extends Era { + const Mortal184(this.value0); + + factory Mortal184._decode(_i1.Input input) { + return Mortal184(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal184': value0}; 
+ + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(184, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal184 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal185 extends Era { + const Mortal185(this.value0); + + factory Mortal185._decode(_i1.Input input) { + return Mortal185(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal185': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(185, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal185 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal186 extends Era { + const Mortal186(this.value0); + + factory Mortal186._decode(_i1.Input input) { + return Mortal186(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal186': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(186, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal186 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal187 extends Era { + const Mortal187(this.value0); + + factory Mortal187._decode(_i1.Input input) { + return Mortal187(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal187': 
value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(187, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal187 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal188 extends Era { + const Mortal188(this.value0); + + factory Mortal188._decode(_i1.Input input) { + return Mortal188(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal188': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(188, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal188 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal189 extends Era { + const Mortal189(this.value0); + + factory Mortal189._decode(_i1.Input input) { + return Mortal189(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal189': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(189, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal189 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal190 extends Era { + const Mortal190(this.value0); + + factory Mortal190._decode(_i1.Input input) { + return Mortal190(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => 
{'Mortal190': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(190, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal190 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal191 extends Era { + const Mortal191(this.value0); + + factory Mortal191._decode(_i1.Input input) { + return Mortal191(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal191': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(191, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal191 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal192 extends Era { + const Mortal192(this.value0); + + factory Mortal192._decode(_i1.Input input) { + return Mortal192(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal192': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(192, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal192 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal193 extends Era { + const Mortal193(this.value0); + + factory Mortal193._decode(_i1.Input input) { + return Mortal193(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() 
=> {'Mortal193': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(193, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal193 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal194 extends Era { + const Mortal194(this.value0); + + factory Mortal194._decode(_i1.Input input) { + return Mortal194(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal194': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(194, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal194 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal195 extends Era { + const Mortal195(this.value0); + + factory Mortal195._decode(_i1.Input input) { + return Mortal195(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal195': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(195, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal195 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal196 extends Era { + const Mortal196(this.value0); + + factory Mortal196._decode(_i1.Input input) { + return Mortal196(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map 
toJson() => {'Mortal196': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(196, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal196 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal197 extends Era { + const Mortal197(this.value0); + + factory Mortal197._decode(_i1.Input input) { + return Mortal197(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal197': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(197, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal197 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal198 extends Era { + const Mortal198(this.value0); + + factory Mortal198._decode(_i1.Input input) { + return Mortal198(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal198': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(198, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal198 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal199 extends Era { + const Mortal199(this.value0); + + factory Mortal199._decode(_i1.Input input) { + return Mortal199(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + 
Map toJson() => {'Mortal199': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(199, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal199 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal200 extends Era { + const Mortal200(this.value0); + + factory Mortal200._decode(_i1.Input input) { + return Mortal200(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal200': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(200, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal200 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal201 extends Era { + const Mortal201(this.value0); + + factory Mortal201._decode(_i1.Input input) { + return Mortal201(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal201': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(201, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal201 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal202 extends Era { + const Mortal202(this.value0); + + factory Mortal202._decode(_i1.Input input) { + return Mortal202(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override 
+ Map toJson() => {'Mortal202': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(202, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal202 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal203 extends Era { + const Mortal203(this.value0); + + factory Mortal203._decode(_i1.Input input) { + return Mortal203(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal203': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(203, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal203 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal204 extends Era { + const Mortal204(this.value0); + + factory Mortal204._decode(_i1.Input input) { + return Mortal204(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal204': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(204, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal204 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal205 extends Era { + const Mortal205(this.value0); + + factory Mortal205._decode(_i1.Input input) { + return Mortal205(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + 
@override + Map toJson() => {'Mortal205': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(205, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal205 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal206 extends Era { + const Mortal206(this.value0); + + factory Mortal206._decode(_i1.Input input) { + return Mortal206(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal206': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(206, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal206 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal207 extends Era { + const Mortal207(this.value0); + + factory Mortal207._decode(_i1.Input input) { + return Mortal207(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal207': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(207, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal207 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal208 extends Era { + const Mortal208(this.value0); + + factory Mortal208._decode(_i1.Input input) { + return Mortal208(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + 
+ @override + Map toJson() => {'Mortal208': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(208, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal208 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal209 extends Era { + const Mortal209(this.value0); + + factory Mortal209._decode(_i1.Input input) { + return Mortal209(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal209': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(209, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal209 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal210 extends Era { + const Mortal210(this.value0); + + factory Mortal210._decode(_i1.Input input) { + return Mortal210(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal210': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(210, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal210 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal211 extends Era { + const Mortal211(this.value0); + + factory Mortal211._decode(_i1.Input input) { + return Mortal211(_i1.U8Codec.codec.decode(input)); + } + + final int value0; 
+ + @override + Map toJson() => {'Mortal211': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(211, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal211 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal212 extends Era { + const Mortal212(this.value0); + + factory Mortal212._decode(_i1.Input input) { + return Mortal212(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal212': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(212, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal212 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal213 extends Era { + const Mortal213(this.value0); + + factory Mortal213._decode(_i1.Input input) { + return Mortal213(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal213': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(213, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal213 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal214 extends Era { + const Mortal214(this.value0); + + factory Mortal214._decode(_i1.Input input) { + return Mortal214(_i1.U8Codec.codec.decode(input)); + } + + final int 
value0; + + @override + Map toJson() => {'Mortal214': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(214, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal214 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal215 extends Era { + const Mortal215(this.value0); + + factory Mortal215._decode(_i1.Input input) { + return Mortal215(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal215': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(215, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal215 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal216 extends Era { + const Mortal216(this.value0); + + factory Mortal216._decode(_i1.Input input) { + return Mortal216(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal216': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(216, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal216 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal217 extends Era { + const Mortal217(this.value0); + + factory Mortal217._decode(_i1.Input input) { + return Mortal217(_i1.U8Codec.codec.decode(input)); + } + + final 
int value0; + + @override + Map toJson() => {'Mortal217': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(217, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal217 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal218 extends Era { + const Mortal218(this.value0); + + factory Mortal218._decode(_i1.Input input) { + return Mortal218(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal218': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(218, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal218 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal219 extends Era { + const Mortal219(this.value0); + + factory Mortal219._decode(_i1.Input input) { + return Mortal219(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal219': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(219, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal219 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal220 extends Era { + const Mortal220(this.value0); + + factory Mortal220._decode(_i1.Input input) { + return Mortal220(_i1.U8Codec.codec.decode(input)); + } + + 
final int value0; + + @override + Map toJson() => {'Mortal220': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(220, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal220 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal221 extends Era { + const Mortal221(this.value0); + + factory Mortal221._decode(_i1.Input input) { + return Mortal221(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal221': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(221, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal221 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal222 extends Era { + const Mortal222(this.value0); + + factory Mortal222._decode(_i1.Input input) { + return Mortal222(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal222': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(222, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal222 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal223 extends Era { + const Mortal223(this.value0); + + factory Mortal223._decode(_i1.Input input) { + return Mortal223(_i1.U8Codec.codec.decode(input)); + } + 
+ final int value0; + + @override + Map toJson() => {'Mortal223': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(223, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal223 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal224 extends Era { + const Mortal224(this.value0); + + factory Mortal224._decode(_i1.Input input) { + return Mortal224(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal224': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(224, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal224 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal225 extends Era { + const Mortal225(this.value0); + + factory Mortal225._decode(_i1.Input input) { + return Mortal225(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal225': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(225, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal225 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal226 extends Era { + const Mortal226(this.value0); + + factory Mortal226._decode(_i1.Input input) { + return Mortal226(_i1.U8Codec.codec.decode(input)); + } 
+ + final int value0; + + @override + Map toJson() => {'Mortal226': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(226, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal226 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal227 extends Era { + const Mortal227(this.value0); + + factory Mortal227._decode(_i1.Input input) { + return Mortal227(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal227': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(227, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal227 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal228 extends Era { + const Mortal228(this.value0); + + factory Mortal228._decode(_i1.Input input) { + return Mortal228(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal228': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(228, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal228 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal229 extends Era { + const Mortal229(this.value0); + + factory Mortal229._decode(_i1.Input input) { + return Mortal229(_i1.U8Codec.codec.decode(input)); + 
} + + final int value0; + + @override + Map toJson() => {'Mortal229': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(229, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal229 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal230 extends Era { + const Mortal230(this.value0); + + factory Mortal230._decode(_i1.Input input) { + return Mortal230(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal230': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(230, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal230 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal231 extends Era { + const Mortal231(this.value0); + + factory Mortal231._decode(_i1.Input input) { + return Mortal231(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal231': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(231, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal231 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal232 extends Era { + const Mortal232(this.value0); + + factory Mortal232._decode(_i1.Input input) { + return Mortal232(_i1.U8Codec.codec.decode(input)); 
+ } + + final int value0; + + @override + Map toJson() => {'Mortal232': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(232, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal232 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal233 extends Era { + const Mortal233(this.value0); + + factory Mortal233._decode(_i1.Input input) { + return Mortal233(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal233': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(233, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal233 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal234 extends Era { + const Mortal234(this.value0); + + factory Mortal234._decode(_i1.Input input) { + return Mortal234(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal234': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(234, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal234 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal235 extends Era { + const Mortal235(this.value0); + + factory Mortal235._decode(_i1.Input input) { + return 
Mortal235(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal235': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(235, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal235 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal236 extends Era { + const Mortal236(this.value0); + + factory Mortal236._decode(_i1.Input input) { + return Mortal236(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal236': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(236, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal236 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal237 extends Era { + const Mortal237(this.value0); + + factory Mortal237._decode(_i1.Input input) { + return Mortal237(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal237': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(237, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal237 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal238 extends Era { + const Mortal238(this.value0); + + factory Mortal238._decode(_i1.Input input) { + 
return Mortal238(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal238': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(238, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal238 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal239 extends Era { + const Mortal239(this.value0); + + factory Mortal239._decode(_i1.Input input) { + return Mortal239(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal239': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(239, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal239 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal240 extends Era { + const Mortal240(this.value0); + + factory Mortal240._decode(_i1.Input input) { + return Mortal240(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal240': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(240, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal240 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal241 extends Era { + const Mortal241(this.value0); + + factory Mortal241._decode(_i1.Input input) { 
+ return Mortal241(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal241': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(241, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal241 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal242 extends Era { + const Mortal242(this.value0); + + factory Mortal242._decode(_i1.Input input) { + return Mortal242(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal242': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(242, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal242 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal243 extends Era { + const Mortal243(this.value0); + + factory Mortal243._decode(_i1.Input input) { + return Mortal243(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal243': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(243, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal243 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal244 extends Era { + const Mortal244(this.value0); + + factory Mortal244._decode(_i1.Input input) 
{ + return Mortal244(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal244': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(244, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal244 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal245 extends Era { + const Mortal245(this.value0); + + factory Mortal245._decode(_i1.Input input) { + return Mortal245(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal245': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(245, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal245 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal246 extends Era { + const Mortal246(this.value0); + + factory Mortal246._decode(_i1.Input input) { + return Mortal246(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal246': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(246, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal246 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal247 extends Era { + const Mortal247(this.value0); + + factory Mortal247._decode(_i1.Input 
input) { + return Mortal247(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal247': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(247, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal247 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal248 extends Era { + const Mortal248(this.value0); + + factory Mortal248._decode(_i1.Input input) { + return Mortal248(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal248': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(248, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal248 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal249 extends Era { + const Mortal249(this.value0); + + factory Mortal249._decode(_i1.Input input) { + return Mortal249(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal249': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(249, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal249 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal250 extends Era { + const Mortal250(this.value0); + + factory 
Mortal250._decode(_i1.Input input) { + return Mortal250(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal250': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(250, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal250 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal251 extends Era { + const Mortal251(this.value0); + + factory Mortal251._decode(_i1.Input input) { + return Mortal251(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal251': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(251, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal251 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal252 extends Era { + const Mortal252(this.value0); + + factory Mortal252._decode(_i1.Input input) { + return Mortal252(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal252': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(252, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal252 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal253 extends Era { + const Mortal253(this.value0); + + 
factory Mortal253._decode(_i1.Input input) { + return Mortal253(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal253': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(253, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal253 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal254 extends Era { + const Mortal254(this.value0); + + factory Mortal254._decode(_i1.Input input) { + return Mortal254(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal254': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(254, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal254 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Mortal255 extends Era { + const Mortal255(this.value0); + + factory Mortal255._decode(_i1.Input input) { + return Mortal255(_i1.U8Codec.codec.decode(input)); + } + + final int value0; + + @override + Map toJson() => {'Mortal255': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8Codec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(255, output); + _i1.U8Codec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Mortal255 && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} diff --git 
a/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/unchecked_extrinsic/unchecked_extrinsic.dart b/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/unchecked_extrinsic/unchecked_extrinsic.dart new file mode 100644 index 00000000..23205b0a --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_runtime/generic/unchecked_extrinsic/unchecked_extrinsic.dart @@ -0,0 +1,23 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +typedef UncheckedExtrinsic = List; + +class UncheckedExtrinsicCodec with _i1.Codec { + const UncheckedExtrinsicCodec(); + + @override + UncheckedExtrinsic decode(_i1.Input input) { + return _i1.U8SequenceCodec.codec.decode(input); + } + + @override + void encodeTo(UncheckedExtrinsic value, _i1.Output output) { + _i1.U8SequenceCodec.codec.encodeTo(value, output); + } + + @override + int sizeHint(UncheckedExtrinsic value) { + return _i1.U8SequenceCodec.codec.sizeHint(value); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_runtime/module_error.dart b/quantus_sdk/lib/generated/planck/types/sp_runtime/module_error.dart new file mode 100644 index 00000000..d1980872 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_runtime/module_error.dart @@ -0,0 +1,57 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i3; + +class ModuleError { + const ModuleError({required this.index, required this.error}); + + factory ModuleError.decode(_i1.Input input) { + return codec.decode(input); + } + + /// u8 + final int index; + + /// [u8; MAX_MODULE_ERROR_ENCODED_SIZE] + final List error; + + static const $ModuleErrorCodec codec = $ModuleErrorCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'index': index, 'error': error.toList()}; + + @override + bool operator 
==(Object other) => + identical(this, other) || other is ModuleError && other.index == index && _i3.listsEqual(other.error, error); + + @override + int get hashCode => Object.hash(index, error); +} + +class $ModuleErrorCodec with _i1.Codec { + const $ModuleErrorCodec(); + + @override + void encodeTo(ModuleError obj, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(obj.index, output); + const _i1.U8ArrayCodec(4).encodeTo(obj.error, output); + } + + @override + ModuleError decode(_i1.Input input) { + return ModuleError(index: _i1.U8Codec.codec.decode(input), error: const _i1.U8ArrayCodec(4).decode(input)); + } + + @override + int sizeHint(ModuleError obj) { + int size = 0; + size = size + _i1.U8Codec.codec.sizeHint(obj.index); + size = size + const _i1.U8ArrayCodec(4).sizeHint(obj.error); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_runtime/multiaddress/multi_address.dart b/quantus_sdk/lib/generated/planck/types/sp_runtime/multiaddress/multi_address.dart new file mode 100644 index 00000000..0466c0f0 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_runtime/multiaddress/multi_address.dart @@ -0,0 +1,276 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 'package:quiver/collection.dart' as _i4; + +import '../../sp_core/crypto/account_id32.dart' as _i3; + +abstract class MultiAddress { + const MultiAddress(); + + factory MultiAddress.decode(_i1.Input input) { + return codec.decode(input); + } + + static const $MultiAddressCodec codec = $MultiAddressCodec(); + + static const $MultiAddress values = $MultiAddress(); + + _i2.Uint8List encode() { + final output = _i1.ByteOutput(codec.sizeHint(this)); + codec.encodeTo(this, output); + return output.toBytes(); + } + + int sizeHint() { + return codec.sizeHint(this); + } + + Map toJson(); +} + +class $MultiAddress { + const $MultiAddress(); + + Id id(_i3.AccountId32 
value0) { + return Id(value0); + } + + Index index(BigInt value0) { + return Index(value0); + } + + Raw raw(List value0) { + return Raw(value0); + } + + Address32 address32(List value0) { + return Address32(value0); + } + + Address20 address20(List value0) { + return Address20(value0); + } +} + +class $MultiAddressCodec with _i1.Codec { + const $MultiAddressCodec(); + + @override + MultiAddress decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return Id._decode(input); + case 1: + return Index._decode(input); + case 2: + return Raw._decode(input); + case 3: + return Address32._decode(input); + case 4: + return Address20._decode(input); + default: + throw Exception('MultiAddress: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(MultiAddress value, _i1.Output output) { + switch (value.runtimeType) { + case Id: + (value as Id).encodeTo(output); + break; + case Index: + (value as Index).encodeTo(output); + break; + case Raw: + (value as Raw).encodeTo(output); + break; + case Address32: + (value as Address32).encodeTo(output); + break; + case Address20: + (value as Address20).encodeTo(output); + break; + default: + throw Exception('MultiAddress: Unsupported "$value" of type "${value.runtimeType}"'); + } + } + + @override + int sizeHint(MultiAddress value) { + switch (value.runtimeType) { + case Id: + return (value as Id)._sizeHint(); + case Index: + return (value as Index)._sizeHint(); + case Raw: + return (value as Raw)._sizeHint(); + case Address32: + return (value as Address32)._sizeHint(); + case Address20: + return (value as Address20)._sizeHint(); + default: + throw Exception('MultiAddress: Unsupported "$value" of type "${value.runtimeType}"'); + } + } +} + +class Id extends MultiAddress { + const Id(this.value0); + + factory Id._decode(_i1.Input input) { + return Id(const _i1.U8ArrayCodec(32).decode(input)); + } + + /// AccountId + final _i3.AccountId32 value0; + + @override + Map> 
toJson() => {'Id': value0.toList()}; + + int _sizeHint() { + int size = 1; + size = size + const _i3.AccountId32Codec().sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(0, output); + const _i1.U8ArrayCodec(32).encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Id && _i4.listsEqual(other.value0, value0); + + @override + int get hashCode => value0.hashCode; +} + +class Index extends MultiAddress { + const Index(this.value0); + + factory Index._decode(_i1.Input input) { + return Index(_i1.CompactBigIntCodec.codec.decode(input)); + } + + /// AccountIndex + final BigInt value0; + + @override + Map toJson() => {'Index': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(1, output); + _i1.CompactBigIntCodec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Index && other.value0 == value0; + + @override + int get hashCode => value0.hashCode; +} + +class Raw extends MultiAddress { + const Raw(this.value0); + + factory Raw._decode(_i1.Input input) { + return Raw(_i1.U8SequenceCodec.codec.decode(input)); + } + + /// Vec + final List value0; + + @override + Map> toJson() => {'Raw': value0}; + + int _sizeHint() { + int size = 1; + size = size + _i1.U8SequenceCodec.codec.sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(2, output); + _i1.U8SequenceCodec.codec.encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => identical(this, other) || other is Raw && _i4.listsEqual(other.value0, value0); + + @override + int get hashCode => value0.hashCode; +} + +class Address32 extends MultiAddress { + const Address32(this.value0); + + factory Address32._decode(_i1.Input input) { + 
return Address32(const _i1.U8ArrayCodec(32).decode(input)); + } + + /// [u8; 32] + final List value0; + + @override + Map> toJson() => {'Address32': value0.toList()}; + + int _sizeHint() { + int size = 1; + size = size + const _i1.U8ArrayCodec(32).sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(3, output); + const _i1.U8ArrayCodec(32).encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Address32 && _i4.listsEqual(other.value0, value0); + + @override + int get hashCode => value0.hashCode; +} + +class Address20 extends MultiAddress { + const Address20(this.value0); + + factory Address20._decode(_i1.Input input) { + return Address20(const _i1.U8ArrayCodec(20).decode(input)); + } + + /// [u8; 20] + final List value0; + + @override + Map> toJson() => {'Address20': value0.toList()}; + + int _sizeHint() { + int size = 1; + size = size + const _i1.U8ArrayCodec(20).sizeHint(value0); + return size; + } + + void encodeTo(_i1.Output output) { + _i1.U8Codec.codec.encodeTo(4, output); + const _i1.U8ArrayCodec(20).encodeTo(value0, output); + } + + @override + bool operator ==(Object other) => + identical(this, other) || other is Address20 && _i4.listsEqual(other.value0, value0); + + @override + int get hashCode => value0.hashCode; +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_runtime/proving_trie/trie_error.dart b/quantus_sdk/lib/generated/planck/types/sp_runtime/proving_trie/trie_error.dart new file mode 100644 index 00000000..f10fe5c0 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_runtime/proving_trie/trie_error.dart @@ -0,0 +1,85 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum TrieError { + invalidStateRoot('InvalidStateRoot', 0), + incompleteDatabase('IncompleteDatabase', 1), + 
valueAtIncompleteKey('ValueAtIncompleteKey', 2), + decoderError('DecoderError', 3), + invalidHash('InvalidHash', 4), + duplicateKey('DuplicateKey', 5), + extraneousNode('ExtraneousNode', 6), + extraneousValue('ExtraneousValue', 7), + extraneousHashReference('ExtraneousHashReference', 8), + invalidChildReference('InvalidChildReference', 9), + valueMismatch('ValueMismatch', 10), + incompleteProof('IncompleteProof', 11), + rootMismatch('RootMismatch', 12), + decodeError('DecodeError', 13); + + const TrieError(this.variantName, this.codecIndex); + + factory TrieError.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $TrieErrorCodec codec = $TrieErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $TrieErrorCodec with _i1.Codec { + const $TrieErrorCodec(); + + @override + TrieError decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return TrieError.invalidStateRoot; + case 1: + return TrieError.incompleteDatabase; + case 2: + return TrieError.valueAtIncompleteKey; + case 3: + return TrieError.decoderError; + case 4: + return TrieError.invalidHash; + case 5: + return TrieError.duplicateKey; + case 6: + return TrieError.extraneousNode; + case 7: + return TrieError.extraneousValue; + case 8: + return TrieError.extraneousHashReference; + case 9: + return TrieError.invalidChildReference; + case 10: + return TrieError.valueMismatch; + case 11: + return TrieError.incompleteProof; + case 12: + return TrieError.rootMismatch; + case 13: + return TrieError.decodeError; + default: + throw Exception('TrieError: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(TrieError value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_runtime/token_error.dart 
b/quantus_sdk/lib/generated/planck/types/sp_runtime/token_error.dart new file mode 100644 index 00000000..38bb9f28 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_runtime/token_error.dart @@ -0,0 +1,73 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum TokenError { + fundsUnavailable('FundsUnavailable', 0), + onlyProvider('OnlyProvider', 1), + belowMinimum('BelowMinimum', 2), + cannotCreate('CannotCreate', 3), + unknownAsset('UnknownAsset', 4), + frozen('Frozen', 5), + unsupported('Unsupported', 6), + cannotCreateHold('CannotCreateHold', 7), + notExpendable('NotExpendable', 8), + blocked('Blocked', 9); + + const TokenError(this.variantName, this.codecIndex); + + factory TokenError.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $TokenErrorCodec codec = $TokenErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $TokenErrorCodec with _i1.Codec { + const $TokenErrorCodec(); + + @override + TokenError decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return TokenError.fundsUnavailable; + case 1: + return TokenError.onlyProvider; + case 2: + return TokenError.belowMinimum; + case 3: + return TokenError.cannotCreate; + case 4: + return TokenError.unknownAsset; + case 5: + return TokenError.frozen; + case 6: + return TokenError.unsupported; + case 7: + return TokenError.cannotCreateHold; + case 8: + return TokenError.notExpendable; + case 9: + return TokenError.blocked; + default: + throw Exception('TokenError: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(TokenError value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git 
a/quantus_sdk/lib/generated/planck/types/sp_runtime/transactional_error.dart b/quantus_sdk/lib/generated/planck/types/sp_runtime/transactional_error.dart new file mode 100644 index 00000000..f556271e --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_runtime/transactional_error.dart @@ -0,0 +1,49 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +enum TransactionalError { + limitReached('LimitReached', 0), + noLayer('NoLayer', 1); + + const TransactionalError(this.variantName, this.codecIndex); + + factory TransactionalError.decode(_i1.Input input) { + return codec.decode(input); + } + + final String variantName; + + final int codecIndex; + + static const $TransactionalErrorCodec codec = $TransactionalErrorCodec(); + + String toJson() => variantName; + + _i2.Uint8List encode() { + return codec.encode(this); + } +} + +class $TransactionalErrorCodec with _i1.Codec { + const $TransactionalErrorCodec(); + + @override + TransactionalError decode(_i1.Input input) { + final index = _i1.U8Codec.codec.decode(input); + switch (index) { + case 0: + return TransactionalError.limitReached; + case 1: + return TransactionalError.noLayer; + default: + throw Exception('TransactionalError: Invalid variant index: "$index"'); + } + } + + @override + void encodeTo(TransactionalError value, _i1.Output output) { + _i1.U8Codec.codec.encodeTo(value.codecIndex, output); + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_version/runtime_version.dart b/quantus_sdk/lib/generated/planck/types/sp_version/runtime_version.dart new file mode 100644 index 00000000..9d10f100 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_version/runtime_version.dart @@ -0,0 +1,140 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i4; + +import 'package:polkadart/scale_codec.dart' as _i1; +import 
'package:quiver/collection.dart' as _i5; + +import '../cow_1.dart' as _i2; +import '../cow_2.dart' as _i3; +import '../tuples.dart' as _i6; + +class RuntimeVersion { + const RuntimeVersion({ + required this.specName, + required this.implName, + required this.authoringVersion, + required this.specVersion, + required this.implVersion, + required this.apis, + required this.transactionVersion, + required this.systemVersion, + }); + + factory RuntimeVersion.decode(_i1.Input input) { + return codec.decode(input); + } + + /// Cow<'static, str> + final _i2.Cow specName; + + /// Cow<'static, str> + final _i2.Cow implName; + + /// u32 + final int authoringVersion; + + /// u32 + final int specVersion; + + /// u32 + final int implVersion; + + /// ApisVec + final _i3.Cow apis; + + /// u32 + final int transactionVersion; + + /// u8 + final int systemVersion; + + static const $RuntimeVersionCodec codec = $RuntimeVersionCodec(); + + _i4.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => { + 'specName': specName, + 'implName': implName, + 'authoringVersion': authoringVersion, + 'specVersion': specVersion, + 'implVersion': implVersion, + 'apis': apis.map((value) => [value.value0.toList(), value.value1]).toList(), + 'transactionVersion': transactionVersion, + 'systemVersion': systemVersion, + }; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is RuntimeVersion && + other.specName == specName && + other.implName == implName && + other.authoringVersion == authoringVersion && + other.specVersion == specVersion && + other.implVersion == implVersion && + _i5.listsEqual(other.apis, apis) && + other.transactionVersion == transactionVersion && + other.systemVersion == systemVersion; + + @override + int get hashCode => Object.hash( + specName, + implName, + authoringVersion, + specVersion, + implVersion, + apis, + transactionVersion, + systemVersion, + ); +} + +class $RuntimeVersionCodec with _i1.Codec { + const 
$RuntimeVersionCodec(); + + @override + void encodeTo(RuntimeVersion obj, _i1.Output output) { + _i1.StrCodec.codec.encodeTo(obj.specName, output); + _i1.StrCodec.codec.encodeTo(obj.implName, output); + _i1.U32Codec.codec.encodeTo(obj.authoringVersion, output); + _i1.U32Codec.codec.encodeTo(obj.specVersion, output); + _i1.U32Codec.codec.encodeTo(obj.implVersion, output); + const _i1.SequenceCodec<_i6.Tuple2, int>>( + _i6.Tuple2Codec, int>(_i1.U8ArrayCodec(8), _i1.U32Codec.codec), + ).encodeTo(obj.apis, output); + _i1.U32Codec.codec.encodeTo(obj.transactionVersion, output); + _i1.U8Codec.codec.encodeTo(obj.systemVersion, output); + } + + @override + RuntimeVersion decode(_i1.Input input) { + return RuntimeVersion( + specName: _i1.StrCodec.codec.decode(input), + implName: _i1.StrCodec.codec.decode(input), + authoringVersion: _i1.U32Codec.codec.decode(input), + specVersion: _i1.U32Codec.codec.decode(input), + implVersion: _i1.U32Codec.codec.decode(input), + apis: const _i1.SequenceCodec<_i6.Tuple2, int>>( + _i6.Tuple2Codec, int>(_i1.U8ArrayCodec(8), _i1.U32Codec.codec), + ).decode(input), + transactionVersion: _i1.U32Codec.codec.decode(input), + systemVersion: _i1.U8Codec.codec.decode(input), + ); + } + + @override + int sizeHint(RuntimeVersion obj) { + int size = 0; + size = size + const _i2.CowCodec().sizeHint(obj.specName); + size = size + const _i2.CowCodec().sizeHint(obj.implName); + size = size + _i1.U32Codec.codec.sizeHint(obj.authoringVersion); + size = size + _i1.U32Codec.codec.sizeHint(obj.specVersion); + size = size + _i1.U32Codec.codec.sizeHint(obj.implVersion); + size = size + const _i3.CowCodec().sizeHint(obj.apis); + size = size + _i1.U32Codec.codec.sizeHint(obj.transactionVersion); + size = size + _i1.U8Codec.codec.sizeHint(obj.systemVersion); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_weights/runtime_db_weight.dart b/quantus_sdk/lib/generated/planck/types/sp_weights/runtime_db_weight.dart new file mode 100644 index 
00000000..aac5361b --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_weights/runtime_db_weight.dart @@ -0,0 +1,56 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 'package:polkadart/scale_codec.dart' as _i1; + +class RuntimeDbWeight { + const RuntimeDbWeight({required this.read, required this.write}); + + factory RuntimeDbWeight.decode(_i1.Input input) { + return codec.decode(input); + } + + /// u64 + final BigInt read; + + /// u64 + final BigInt write; + + static const $RuntimeDbWeightCodec codec = $RuntimeDbWeightCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'read': read, 'write': write}; + + @override + bool operator ==(Object other) => + identical(this, other) || other is RuntimeDbWeight && other.read == read && other.write == write; + + @override + int get hashCode => Object.hash(read, write); +} + +class $RuntimeDbWeightCodec with _i1.Codec { + const $RuntimeDbWeightCodec(); + + @override + void encodeTo(RuntimeDbWeight obj, _i1.Output output) { + _i1.U64Codec.codec.encodeTo(obj.read, output); + _i1.U64Codec.codec.encodeTo(obj.write, output); + } + + @override + RuntimeDbWeight decode(_i1.Input input) { + return RuntimeDbWeight(read: _i1.U64Codec.codec.decode(input), write: _i1.U64Codec.codec.decode(input)); + } + + @override + int sizeHint(RuntimeDbWeight obj) { + int size = 0; + size = size + _i1.U64Codec.codec.sizeHint(obj.read); + size = size + _i1.U64Codec.codec.sizeHint(obj.write); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/sp_weights/weight_v2/weight.dart b/quantus_sdk/lib/generated/planck/types/sp_weights/weight_v2/weight.dart new file mode 100644 index 00000000..8d9a8883 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/sp_weights/weight_v2/weight.dart @@ -0,0 +1,59 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'dart:typed_data' as _i2; + +import 
'package:polkadart/scale_codec.dart' as _i1; + +class Weight { + const Weight({required this.refTime, required this.proofSize}); + + factory Weight.decode(_i1.Input input) { + return codec.decode(input); + } + + /// u64 + final BigInt refTime; + + /// u64 + final BigInt proofSize; + + static const $WeightCodec codec = $WeightCodec(); + + _i2.Uint8List encode() { + return codec.encode(this); + } + + Map toJson() => {'refTime': refTime, 'proofSize': proofSize}; + + @override + bool operator ==(Object other) => + identical(this, other) || other is Weight && other.refTime == refTime && other.proofSize == proofSize; + + @override + int get hashCode => Object.hash(refTime, proofSize); +} + +class $WeightCodec with _i1.Codec { + const $WeightCodec(); + + @override + void encodeTo(Weight obj, _i1.Output output) { + _i1.CompactBigIntCodec.codec.encodeTo(obj.refTime, output); + _i1.CompactBigIntCodec.codec.encodeTo(obj.proofSize, output); + } + + @override + Weight decode(_i1.Input input) { + return Weight( + refTime: _i1.CompactBigIntCodec.codec.decode(input), + proofSize: _i1.CompactBigIntCodec.codec.decode(input), + ); + } + + @override + int sizeHint(Weight obj) { + int size = 0; + size = size + _i1.CompactBigIntCodec.codec.sizeHint(obj.refTime); + size = size + _i1.CompactBigIntCodec.codec.sizeHint(obj.proofSize); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/tuples.dart b/quantus_sdk/lib/generated/planck/types/tuples.dart new file mode 100644 index 00000000..2f309fdb --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/tuples.dart @@ -0,0 +1,37 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +class Tuple2 { + const Tuple2(this.value0, this.value1); + + final T0 value0; + + final T1 value1; +} + +class Tuple2Codec with _i1.Codec> { + const Tuple2Codec(this.codec0, this.codec1); + + final _i1.Codec codec0; + + final _i1.Codec codec1; + + @override + void 
encodeTo(Tuple2 tuple, _i1.Output output) { + codec0.encodeTo(tuple.value0, output); + codec1.encodeTo(tuple.value1, output); + } + + @override + Tuple2 decode(_i1.Input input) { + return Tuple2(codec0.decode(input), codec1.decode(input)); + } + + @override + int sizeHint(Tuple2 tuple) { + int size = 0; + size += codec0.sizeHint(tuple.value0); + size += codec1.sizeHint(tuple.value1); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/tuples_1.dart b/quantus_sdk/lib/generated/planck/types/tuples_1.dart new file mode 100644 index 00000000..2f309fdb --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/tuples_1.dart @@ -0,0 +1,37 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +class Tuple2 { + const Tuple2(this.value0, this.value1); + + final T0 value0; + + final T1 value1; +} + +class Tuple2Codec with _i1.Codec> { + const Tuple2Codec(this.codec0, this.codec1); + + final _i1.Codec codec0; + + final _i1.Codec codec1; + + @override + void encodeTo(Tuple2 tuple, _i1.Output output) { + codec0.encodeTo(tuple.value0, output); + codec1.encodeTo(tuple.value1, output); + } + + @override + Tuple2 decode(_i1.Input input) { + return Tuple2(codec0.decode(input), codec1.decode(input)); + } + + @override + int sizeHint(Tuple2 tuple) { + int size = 0; + size += codec0.sizeHint(tuple.value0); + size += codec1.sizeHint(tuple.value1); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/tuples_2.dart b/quantus_sdk/lib/generated/planck/types/tuples_2.dart new file mode 100644 index 00000000..aa48d3ff --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/tuples_2.dart @@ -0,0 +1,43 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +class Tuple3 { + const Tuple3(this.value0, this.value1, this.value2); + + final T0 value0; + + final T1 value1; + + final T2 value2; +} + +class Tuple3Codec 
with _i1.Codec> { + const Tuple3Codec(this.codec0, this.codec1, this.codec2); + + final _i1.Codec codec0; + + final _i1.Codec codec1; + + final _i1.Codec codec2; + + @override + void encodeTo(Tuple3 tuple, _i1.Output output) { + codec0.encodeTo(tuple.value0, output); + codec1.encodeTo(tuple.value1, output); + codec2.encodeTo(tuple.value2, output); + } + + @override + Tuple3 decode(_i1.Input input) { + return Tuple3(codec0.decode(input), codec1.decode(input), codec2.decode(input)); + } + + @override + int sizeHint(Tuple3 tuple) { + int size = 0; + size += codec0.sizeHint(tuple.value0); + size += codec1.sizeHint(tuple.value1); + size += codec2.sizeHint(tuple.value2); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/tuples_3.dart b/quantus_sdk/lib/generated/planck/types/tuples_3.dart new file mode 100644 index 00000000..933476a6 --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/tuples_3.dart @@ -0,0 +1,61 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +class Tuple5 { + const Tuple5(this.value0, this.value1, this.value2, this.value3, this.value4); + + final T0 value0; + + final T1 value1; + + final T2 value2; + + final T3 value3; + + final T4 value4; +} + +class Tuple5Codec with _i1.Codec> { + const Tuple5Codec(this.codec0, this.codec1, this.codec2, this.codec3, this.codec4); + + final _i1.Codec codec0; + + final _i1.Codec codec1; + + final _i1.Codec codec2; + + final _i1.Codec codec3; + + final _i1.Codec codec4; + + @override + void encodeTo(Tuple5 tuple, _i1.Output output) { + codec0.encodeTo(tuple.value0, output); + codec1.encodeTo(tuple.value1, output); + codec2.encodeTo(tuple.value2, output); + codec3.encodeTo(tuple.value3, output); + codec4.encodeTo(tuple.value4, output); + } + + @override + Tuple5 decode(_i1.Input input) { + return Tuple5( + codec0.decode(input), + codec1.decode(input), + codec2.decode(input), + codec3.decode(input), + 
codec4.decode(input), + ); + } + + @override + int sizeHint(Tuple5 tuple) { + int size = 0; + size += codec0.sizeHint(tuple.value0); + size += codec1.sizeHint(tuple.value1); + size += codec2.sizeHint(tuple.value2); + size += codec3.sizeHint(tuple.value3); + size += codec4.sizeHint(tuple.value4); + return size; + } +} diff --git a/quantus_sdk/lib/generated/planck/types/tuples_4.dart b/quantus_sdk/lib/generated/planck/types/tuples_4.dart new file mode 100644 index 00000000..34780bfb --- /dev/null +++ b/quantus_sdk/lib/generated/planck/types/tuples_4.dart @@ -0,0 +1,128 @@ +// ignore_for_file: no_leading_underscores_for_library_prefixes +import 'package:polkadart/scale_codec.dart' as _i1; + +class Tuple11 { + const Tuple11( + this.value0, + this.value1, + this.value2, + this.value3, + this.value4, + this.value5, + this.value6, + this.value7, + this.value8, + this.value9, + this.value10, + ); + + final T0 value0; + + final T1 value1; + + final T2 value2; + + final T3 value3; + + final T4 value4; + + final T5 value5; + + final T6 value6; + + final T7 value7; + + final T8 value8; + + final T9 value9; + + final T10 value10; +} + +class Tuple11Codec + with _i1.Codec> { + const Tuple11Codec( + this.codec0, + this.codec1, + this.codec2, + this.codec3, + this.codec4, + this.codec5, + this.codec6, + this.codec7, + this.codec8, + this.codec9, + this.codec10, + ); + + final _i1.Codec codec0; + + final _i1.Codec codec1; + + final _i1.Codec codec2; + + final _i1.Codec codec3; + + final _i1.Codec codec4; + + final _i1.Codec codec5; + + final _i1.Codec codec6; + + final _i1.Codec codec7; + + final _i1.Codec codec8; + + final _i1.Codec codec9; + + final _i1.Codec codec10; + + @override + void encodeTo(Tuple11 tuple, _i1.Output output) { + codec0.encodeTo(tuple.value0, output); + codec1.encodeTo(tuple.value1, output); + codec2.encodeTo(tuple.value2, output); + codec3.encodeTo(tuple.value3, output); + codec4.encodeTo(tuple.value4, output); + codec5.encodeTo(tuple.value5, output); + 
codec6.encodeTo(tuple.value6, output); + codec7.encodeTo(tuple.value7, output); + codec8.encodeTo(tuple.value8, output); + codec9.encodeTo(tuple.value9, output); + codec10.encodeTo(tuple.value10, output); + } + + @override + Tuple11 decode(_i1.Input input) { + return Tuple11( + codec0.decode(input), + codec1.decode(input), + codec2.decode(input), + codec3.decode(input), + codec4.decode(input), + codec5.decode(input), + codec6.decode(input), + codec7.decode(input), + codec8.decode(input), + codec9.decode(input), + codec10.decode(input), + ); + } + + @override + int sizeHint(Tuple11 tuple) { + int size = 0; + size += codec0.sizeHint(tuple.value0); + size += codec1.sizeHint(tuple.value1); + size += codec2.sizeHint(tuple.value2); + size += codec3.sizeHint(tuple.value3); + size += codec4.sizeHint(tuple.value4); + size += codec5.sizeHint(tuple.value5); + size += codec6.sizeHint(tuple.value6); + size += codec7.sizeHint(tuple.value7); + size += codec8.sizeHint(tuple.value8); + size += codec9.sizeHint(tuple.value9); + size += codec10.sizeHint(tuple.value10); + return size; + } +} diff --git a/quantus_sdk/lib/quantus_sdk.dart b/quantus_sdk/lib/quantus_sdk.dart index e258c28a..26e6c46b 100644 --- a/quantus_sdk/lib/quantus_sdk.dart +++ b/quantus_sdk/lib/quantus_sdk.dart @@ -5,8 +5,8 @@ import 'package:quantus_sdk/src/services/settings_service.dart'; import 'src/rust/frb_generated.dart'; -export 'generated/schrodinger/pallets/balances.dart'; -export 'generated/schrodinger/types/quantus_runtime/runtime_call.dart'; +export 'generated/planck/pallets/balances.dart'; +export 'generated/planck/types/quantus_runtime/runtime_call.dart'; export 'src/constants/app_constants.dart'; export 'src/extensions/color_extensions.dart'; export 'src/extensions/context_extension.dart'; @@ -39,6 +39,14 @@ export 'src/models/raid_stats.dart'; // should probably expise all of crypto.dart through substrateservice instead export 'src/rust/api/crypto.dart' hide crystalAlice, crystalCharlie, crystalBob; 
export 'src/rust/api/ur.dart'; +// Re-export raw FFI wormhole types (prefixed with 'Ffi' via the service layer for clarity) +// Most users should use WormholeService instead +export 'src/rust/api/wormhole.dart' + show + WormholePairResult, + WormholeError, + CircuitConfig, + CircuitGenerationResult; export 'src/services/account_discovery_service.dart'; export 'src/services/accounts_service.dart'; export 'src/services/address_formatting_service.dart'; @@ -60,6 +68,12 @@ export 'src/services/substrate_service.dart'; export 'src/services/swap_service.dart'; export 'src/services/taskmaster_service.dart'; export 'src/services/senoti_service.dart'; +export 'src/services/wormhole_service.dart'; +export 'src/services/wormhole_utxo_service.dart'; +export 'src/services/wormhole_address_manager.dart'; +export 'src/services/wormhole_withdrawal_service.dart'; +export 'src/services/mnemonic_provider.dart'; +export 'src/services/circuit_manager.dart'; export 'src/extensions/account_extension.dart'; export 'src/quantus_signing_payload.dart'; export 'src/quantus_payload_parser.dart'; diff --git a/quantus_sdk/lib/src/extensions/duration_extension.dart b/quantus_sdk/lib/src/extensions/duration_extension.dart index c4be7ad4..89a77bc0 100644 --- a/quantus_sdk/lib/src/extensions/duration_extension.dart +++ b/quantus_sdk/lib/src/extensions/duration_extension.dart @@ -1,4 +1,4 @@ -import 'package:quantus_sdk/generated/schrodinger/types/qp_scheduler/block_number_or_timestamp.dart' as qp; +import 'package:quantus_sdk/generated/planck/types/qp_scheduler/block_number_or_timestamp.dart' as qp; extension DurationToTimestampExtension on Duration { qp.Timestamp get qpTimestamp => qp.Timestamp(BigInt.from(inSeconds) * BigInt.from(1000)); diff --git a/quantus_sdk/lib/src/rust/api/crypto.dart b/quantus_sdk/lib/src/rust/api/crypto.dart index 0224a262..40c73f19 100644 --- a/quantus_sdk/lib/src/rust/api/crypto.dart +++ b/quantus_sdk/lib/src/rust/api/crypto.dart @@ -45,14 +45,11 @@ Keypair 
crystalCharlie() => RustLib.instance.api.crateApiCryptoCrystalCharlie(); Uint8List deriveHdPath({required List seed, required String path}) => RustLib.instance.api.crateApiCryptoDeriveHdPath(seed: seed, path: path); -int get publicKeySize => - RustLib.instance.api.crateApiCryptoPublicKeyBytes().toInt(); // these are ussize and anyway small +BigInt publicKeyBytes() => RustLib.instance.api.crateApiCryptoPublicKeyBytes(); -int get secretKeySize => - RustLib.instance.api.crateApiCryptoSecretKeyBytes().toInt(); // these are ussize and anyway small +BigInt secretKeyBytes() => RustLib.instance.api.crateApiCryptoSecretKeyBytes(); -int get signatureSize => - RustLib.instance.api.crateApiCryptoSignatureBytes().toInt(); // these are ussize and anyway small +BigInt signatureBytes() => RustLib.instance.api.crateApiCryptoSignatureBytes(); // Rust type: RustOpaqueMoi> abstract class HdLatticeError implements RustOpaqueInterface {} diff --git a/quantus_sdk/lib/src/rust/api/wormhole.dart b/quantus_sdk/lib/src/rust/api/wormhole.dart new file mode 100644 index 00000000..30286bdc --- /dev/null +++ b/quantus_sdk/lib/src/rust/api/wormhole.dart @@ -0,0 +1,660 @@ +// This file is automatically generated, so please do not edit it. +// @generated by `flutter_rust_bridge`@ 2.11.1. 
+ +// ignore_for_file: invalid_use_of_internal_member, unused_import, unnecessary_import + +import '../frb_generated.dart'; +import 'package:flutter_rust_bridge/flutter_rust_bridge_for_generated.dart'; + +// These functions are ignored because they are not marked as `pub`: `clone_prover`, `compute_block_hash_internal`, `compute_transfer_proof_leaf_hash`, `parse_hex_32`, `parse_hex`, `ss58_to_bytes` +// These function are ignored because they are on traits that is not defined in current crate (put an empty `#[frb]` on it to unignore): `clone`, `clone`, `clone`, `clone`, `clone`, `clone`, `clone`, `fmt`, `fmt`, `fmt`, `fmt`, `fmt`, `fmt`, `fmt`, `fmt`, `fmt`, `from` + +/// Derive a wormhole address pair from a mnemonic. +/// +/// # Arguments +/// * `mnemonic` - The 24-word BIP39 mnemonic phrase +/// * `purpose` - The purpose index (0 = mobile sends, 1 = miner rewards) +/// * `index` - The address index within the purpose +/// +/// # Returns +/// A `WormholePairResult` containing the address, first_hash, and secret. +/// +/// # Example +/// ```ignore +/// let result = derive_wormhole_pair( +/// "word1 word2 ... word24".to_string(), +/// 1, // purpose: miner rewards +/// 0, // index: first address +/// )?; +/// println!("Rewards preimage (for --rewards-preimage): {}", result.first_hash_ss58); +/// println!("Wormhole address (on-chain account): {}", result.address); +/// ``` +WormholePairResult deriveWormholePair({required String mnemonic, required int purpose, required int index}) => + RustLib.instance.api.crateApiWormholeDeriveWormholePair(mnemonic: mnemonic, purpose: purpose, index: index); + +/// Convert a first_hash (rewards preimage) to its corresponding wormhole address. 
+/// +/// This computes the address exactly as the chain and ZK circuit do: +/// - Convert first_hash (32 bytes) to 4 field elements using unsafe_digest_bytes_to_felts +/// (8 bytes per element) +/// - Hash once without padding using hash_variable_length +/// +/// The wormhole address derivation is: +/// - secret -> hash(salt + secret) = first_hash (preimage for node) +/// - first_hash -> hash(first_hash) = address +/// +/// # Arguments +/// * `first_hash_hex` - The first_hash bytes as hex string (with or without 0x prefix) +/// +/// # Returns +/// The wormhole address as SS58 string. +String firstHashToAddress({required String firstHashHex}) => + RustLib.instance.api.crateApiWormholeFirstHashToAddress(firstHashHex: firstHashHex); + +/// Get the wormhole HD derivation path for a given purpose and index. +/// +/// # Arguments +/// * `purpose` - The purpose index (0 = mobile sends, 1 = miner rewards) +/// * `index` - The address index within the purpose +/// +/// # Returns +/// The full HD derivation path string. +String getWormholeDerivationPath({required int purpose, required int index}) => + RustLib.instance.api.crateApiWormholeGetWormholeDerivationPath(purpose: purpose, index: index); + +/// Compute the nullifier for a wormhole UTXO. +/// +/// The nullifier is a deterministic hash of (secret, transfer_count) that prevents +/// double-spending. Once revealed on-chain, the UTXO cannot be spent again. +/// +/// # Arguments +/// * `secret_hex` - The wormhole secret (32 bytes, hex with 0x prefix) +/// * `transfer_count` - The transfer count from NativeTransferred event +/// +/// # Returns +/// The nullifier as hex string with 0x prefix. +String computeNullifier({required String secretHex, required BigInt transferCount}) => + RustLib.instance.api.crateApiWormholeComputeNullifier(secretHex: secretHex, transferCount: transferCount); + +/// Derive the wormhole address from a secret. 
+/// +/// This computes the unspendable account address that corresponds to the given secret. +/// +/// # Arguments +/// * `secret_hex` - The wormhole secret (32 bytes, hex with 0x prefix) +/// +/// # Returns +/// The wormhole address as SS58 string. +String deriveAddressFromSecret({required String secretHex}) => + RustLib.instance.api.crateApiWormholeDeriveAddressFromSecret(secretHex: secretHex); + +/// Quantize an amount from planck (12 decimals) to the circuit format (2 decimals). +/// +/// The circuit uses quantized amounts for privacy. This function converts +/// a full-precision amount to the quantized format. +/// +/// # Arguments +/// * `amount_planck` - Amount in planck (smallest unit, 12 decimal places) +/// +/// # Returns +/// Quantized amount (2 decimal places) that can be used in proof outputs. +int quantizeAmount({required BigInt amountPlanck}) => + RustLib.instance.api.crateApiWormholeQuantizeAmount(amountPlanck: amountPlanck); + +/// Dequantize an amount from circuit format (2 decimals) back to planck (12 decimals). +/// +/// # Arguments +/// * `quantized_amount` - Amount in circuit format (2 decimal places) +/// +/// # Returns +/// Amount in planck (12 decimal places). +BigInt dequantizeAmount({required int quantizedAmount}) => + RustLib.instance.api.crateApiWormholeDequantizeAmount(quantizedAmount: quantizedAmount); + +/// Compute the output amount after fee deduction. +/// +/// The circuit enforces that output amounts don't exceed input minus fee. +/// Use this function to compute the correct output amount for proof generation. +/// +/// Formula: `output = input * (10000 - fee_bps) / 10000` +/// +/// # Arguments +/// * `input_amount` - Input amount in quantized units (from quantize_amount) +/// * `fee_bps` - Fee rate in basis points (e.g., 10 = 0.1%) +/// +/// # Returns +/// Maximum output amount in quantized units. 
+/// +/// # Example +/// ```ignore +/// let input = quantize_amount(383561629241)?; // 38 in quantized +/// let output = compute_output_amount(input, 10); // 37 (after 0.1% fee) +/// ``` +int computeOutputAmount({required int inputAmount, required int feeBps}) => + RustLib.instance.api.crateApiWormholeComputeOutputAmount(inputAmount: inputAmount, feeBps: feeBps); + +/// Get the batch size for proof aggregation. +/// +/// # Arguments +/// * `bins_dir` - Path to circuit binaries directory +/// +/// # Returns +/// Number of proofs that must be aggregated together. +BigInt getAggregationBatchSize({required String binsDir}) => + RustLib.instance.api.crateApiWormholeGetAggregationBatchSize(binsDir: binsDir); + +/// Encode digest logs from RPC format to SCALE-encoded bytes. +/// +/// The RPC returns digest logs as an array of hex-encoded SCALE bytes. +/// This function properly encodes them as a SCALE Vec which +/// matches what the circuit expects. +/// +/// # Arguments +/// * `logs_hex` - Array of hex-encoded digest log items from RPC +/// +/// # Returns +/// SCALE-encoded digest as hex string (with 0x prefix), padded/truncated to 110 bytes. +/// +/// # Example +/// ```ignore +/// // From RPC: header.digest.logs = ["0x0642...", "0x0561..."] +/// let digest_hex = encode_digest_from_rpc_logs(vec!["0x0642...".into(), "0x0561...".into()])?; +/// ``` +String encodeDigestFromRpcLogs({required List logsHex}) => + RustLib.instance.api.crateApiWormholeEncodeDigestFromRpcLogs(logsHex: logsHex); + +/// Compute the full storage key for a wormhole TransferProof. +/// +/// This key can be used with `state_getReadProof` RPC to fetch the Merkle proof +/// needed for ZK proof generation. 
+/// +/// The storage key is: module_prefix ++ storage_prefix ++ poseidon_hash(key) +/// +/// # Arguments +/// * `secret_hex` - The wormhole secret (32 bytes, hex with 0x prefix) +/// * `transfer_count` - The transfer count from NativeTransferred event +/// * `funding_account` - The account that sent the funds (SS58 format) +/// * `amount` - The exact transfer amount in planck +/// +/// # Returns +/// The full storage key as hex string with 0x prefix. +String computeTransferProofStorageKey({ + required String secretHex, + required BigInt transferCount, + required String fundingAccount, + required BigInt amount, +}) => RustLib.instance.api.crateApiWormholeComputeTransferProofStorageKey( + secretHex: secretHex, + transferCount: transferCount, + fundingAccount: fundingAccount, + amount: amount, +); + +/// Create a new proof generator. +/// +/// This loads ~171MB of circuit data, so it's expensive. Call once and reuse. +/// +/// # Arguments +/// * `bins_dir` - Path to directory containing prover.bin and common.bin +Future createProofGenerator({required String binsDir}) => + RustLib.instance.api.crateApiWormholeCreateProofGenerator(binsDir: binsDir); + +/// Create a new proof aggregator. +/// +/// # Arguments +/// * `bins_dir` - Path to directory containing aggregator circuit files +Future createProofAggregator({required String binsDir}) => + RustLib.instance.api.crateApiWormholeCreateProofAggregator(binsDir: binsDir); + +/// Compute block hash from header components. +/// +/// This matches the Poseidon block hash computation used by the Quantus chain. +/// The hash is computed over the SCALE-encoded header components. 
+/// +/// # Arguments +/// * `parent_hash_hex` - Parent block hash (32 bytes, hex with 0x prefix) +/// * `state_root_hex` - State root (32 bytes, hex with 0x prefix) +/// * `extrinsics_root_hex` - Extrinsics root (32 bytes, hex with 0x prefix) +/// * `block_number` - Block number +/// * `digest_hex` - SCALE-encoded digest (hex with 0x prefix, from encode_digest_from_rpc_logs) +/// +/// # Returns +/// Block hash as hex string with 0x prefix. +String computeBlockHash({ + required String parentHashHex, + required String stateRootHex, + required String extrinsicsRootHex, + required int blockNumber, + required String digestHex, +}) => RustLib.instance.api.crateApiWormholeComputeBlockHash( + parentHashHex: parentHashHex, + stateRootHex: stateRootHex, + extrinsicsRootHex: extrinsicsRootHex, + blockNumber: blockNumber, + digestHex: digestHex, +); + +/// Generate circuit binary files for ZK proof generation. +/// +/// This is a long-running operation (10-30 minutes) that generates the +/// circuit binaries needed for wormhole withdrawal proofs. +/// +/// # Arguments +/// * `output_dir` - Directory to write the binaries to +/// * `num_leaf_proofs` - Number of leaf proofs per aggregation (typically 8) +/// +/// # Returns +/// A `CircuitGenerationResult` indicating success or failure. 
+/// +/// # Generated Files +/// - `prover.bin` - Prover circuit data (~163MB) +/// - `common.bin` - Common circuit data +/// - `verifier.bin` - Verifier circuit data +/// - `dummy_proof.bin` - Dummy proof for aggregation padding +/// - `aggregated_common.bin` - Aggregated circuit common data +/// - `aggregated_verifier.bin` - Aggregated circuit verifier data +/// - `config.json` - Configuration with hashes for integrity verification +Future generateCircuitBinaries({required String outputDir, required int numLeafProofs}) => + RustLib.instance.api.crateApiWormholeGenerateCircuitBinaries(outputDir: outputDir, numLeafProofs: numLeafProofs); + +/// Check if circuit binaries exist and are valid in a directory. +/// +/// # Arguments +/// * `bins_dir` - Directory containing the circuit binaries +/// +/// # Returns +/// True if all required files exist, false otherwise. +bool checkCircuitBinariesExist({required String binsDir}) => + RustLib.instance.api.crateApiWormholeCheckCircuitBinariesExist(binsDir: binsDir); + +// Rust type: RustOpaqueMoi> +abstract class WormholeProofAggregator implements RustOpaqueInterface { + /// Add a proof to the aggregation buffer. + /// + /// # Arguments + /// * `proof_hex` - The serialized proof bytes (hex encoded with 0x prefix) + Future addProof({required String proofHex}); + + /// Aggregate all proofs in the buffer. + /// + /// If fewer than `batch_size` proofs have been added, the remaining + /// slots are filled with dummy proofs automatically. + /// + /// # Returns + /// The aggregated proof. + Future aggregate(); + + /// Get the batch size (number of proofs per aggregation). + Future batchSize(); + + /// Clear the proof buffer without aggregating. + Future clear(); + + // HINT: Make it `#[frb(sync)]` to let it become the default constructor of Dart class. + /// Create a new proof aggregator from circuit files. 
+ /// + /// # Arguments + /// * `bins_dir` - Path to directory containing aggregator circuit files + /// + /// # Returns + /// A new proof aggregator instance. + static Future newInstance({required String binsDir}) => + RustLib.instance.api.crateApiWormholeWormholeProofAggregatorNew(binsDir: binsDir); + + /// Get the number of proofs currently in the buffer. + Future proofCount(); +} + +/// Result of proof aggregation. +class AggregatedProof { + /// The serialized aggregated proof bytes (hex encoded). + final String proofHex; + + /// Number of real proofs in the batch (rest are dummies). + final BigInt numRealProofs; + + const AggregatedProof({required this.proofHex, required this.numRealProofs}); + + @override + int get hashCode => proofHex.hashCode ^ numRealProofs.hashCode; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is AggregatedProof && + runtimeType == other.runtimeType && + proofHex == other.proofHex && + numRealProofs == other.numRealProofs; +} + +/// Block header data needed for proof generation. +class BlockHeaderData { + /// Parent block hash (hex encoded). + final String parentHashHex; + + /// State root of the block (hex encoded). + final String stateRootHex; + + /// Extrinsics root of the block (hex encoded). + final String extrinsicsRootHex; + + /// Block number. + final int blockNumber; + + /// Encoded digest (hex encoded, up to 110 bytes). 
+ final String digestHex; + + const BlockHeaderData({ + required this.parentHashHex, + required this.stateRootHex, + required this.extrinsicsRootHex, + required this.blockNumber, + required this.digestHex, + }); + + @override + int get hashCode => + parentHashHex.hashCode ^ + stateRootHex.hashCode ^ + extrinsicsRootHex.hashCode ^ + blockNumber.hashCode ^ + digestHex.hashCode; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is BlockHeaderData && + runtimeType == other.runtimeType && + parentHashHex == other.parentHashHex && + stateRootHex == other.stateRootHex && + extrinsicsRootHex == other.extrinsicsRootHex && + blockNumber == other.blockNumber && + digestHex == other.digestHex; +} + +/// Configuration loaded from circuit binaries directory. +class CircuitConfig { + /// Number of leaf proofs in an aggregation batch. + final BigInt numLeafProofs; + + const CircuitConfig({required this.numLeafProofs}); + + /// Load configuration from a circuit binaries directory. + static Future load({required String binsDir}) => + RustLib.instance.api.crateApiWormholeCircuitConfigLoad(binsDir: binsDir); + + @override + int get hashCode => numLeafProofs.hashCode; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is CircuitConfig && runtimeType == other.runtimeType && numLeafProofs == other.numLeafProofs; +} + +/// Result of circuit binary generation +class CircuitGenerationResult { + /// Whether generation succeeded + final bool success; + + /// Error message if failed + final String? error; + + /// Path to the generated binaries directory + final String? 
outputDir; + + const CircuitGenerationResult({required this.success, this.error, this.outputDir}); + + @override + int get hashCode => success.hashCode ^ error.hashCode ^ outputDir.hashCode; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is CircuitGenerationResult && + runtimeType == other.runtimeType && + success == other.success && + error == other.error && + outputDir == other.outputDir; +} + +/// Result of proof generation. +class GeneratedProof { + /// The serialized proof bytes (hex encoded). + final String proofHex; + + /// The nullifier for this UTXO (hex encoded) - used to track spent UTXOs. + final String nullifierHex; + + const GeneratedProof({required this.proofHex, required this.nullifierHex}); + + @override + int get hashCode => proofHex.hashCode ^ nullifierHex.hashCode; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is GeneratedProof && + runtimeType == other.runtimeType && + proofHex == other.proofHex && + nullifierHex == other.nullifierHex; +} + +/// Output assignment for a proof - where the funds go. +class ProofOutputAssignment { + /// Amount for output 1 (quantized to 2 decimal places). + final int outputAmount1; + + /// Exit account for output 1 (SS58 address). + final String exitAccount1; + + /// Amount for output 2 (quantized, 0 if unused). + final int outputAmount2; + + /// Exit account for output 2 (SS58 address, empty if unused). 
+ final String exitAccount2; + + const ProofOutputAssignment({ + required this.outputAmount1, + required this.exitAccount1, + required this.outputAmount2, + required this.exitAccount2, + }); + + @override + int get hashCode => outputAmount1.hashCode ^ exitAccount1.hashCode ^ outputAmount2.hashCode ^ exitAccount2.hashCode; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ProofOutputAssignment && + runtimeType == other.runtimeType && + outputAmount1 == other.outputAmount1 && + exitAccount1 == other.exitAccount1 && + outputAmount2 == other.outputAmount2 && + exitAccount2 == other.exitAccount2; +} + +/// Storage proof data for the transfer. +class StorageProofData { + /// Raw proof nodes from the state trie (each node is hex encoded). + final List proofNodesHex; + + /// State root the proof is against (hex encoded). + final String stateRootHex; + + const StorageProofData({required this.proofNodesHex, required this.stateRootHex}); + + @override + int get hashCode => proofNodesHex.hashCode ^ stateRootHex.hashCode; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is StorageProofData && + runtimeType == other.runtimeType && + proofNodesHex == other.proofNodesHex && + stateRootHex == other.stateRootHex; +} + +/// Error type for wormhole operations +class WormholeError implements FrbException { + final String message; + + const WormholeError({required this.message}); + + /// Returns the error message as a string for display. 
+ @override + String toString() => RustLib.instance.api.crateApiWormholeWormholeErrorToDisplayString(that: this); + + @override + int get hashCode => message.hashCode; + + @override + bool operator ==(Object other) => + identical(this, other) || other is WormholeError && runtimeType == other.runtimeType && message == other.message; +} + +/// Result of wormhole pair derivation +class WormholePairResult { + /// The wormhole address as SS58 (the on-chain account) + final String address; + + /// The raw address bytes (32 bytes, hex encoded) + final String addressHex; + + /// The first hash / rewards preimage as SS58 (pass to --rewards-preimage) + final String firstHashSs58; + + /// The first hash / rewards preimage bytes (32 bytes, hex encoded) + final String firstHashHex; + + /// The secret bytes (32 bytes, hex encoded) - SENSITIVE, needed for ZK proofs + final String secretHex; + + const WormholePairResult({ + required this.address, + required this.addressHex, + required this.firstHashSs58, + required this.firstHashHex, + required this.secretHex, + }); + + @override + int get hashCode => + address.hashCode ^ addressHex.hashCode ^ firstHashSs58.hashCode ^ firstHashHex.hashCode ^ secretHex.hashCode; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is WormholePairResult && + runtimeType == other.runtimeType && + address == other.address && + addressHex == other.addressHex && + firstHashSs58 == other.firstHashSs58 && + firstHashHex == other.firstHashHex && + secretHex == other.secretHex; +} + +/// Opaque handle to a proof generator. +/// +/// The generator is expensive to initialize (loads ~171MB of circuit data), +/// so it should be created once and reused for all proof generations. +class WormholeProofGenerator { + final String binsDir; + + const WormholeProofGenerator({required this.binsDir}); + + /// Generate a proof for a wormhole withdrawal. 
+ /// + /// # Arguments + /// * `utxo` - The UTXO to spend + /// * `output` - Where to send the funds + /// * `fee_bps` - Fee in basis points + /// * `block_header` - Block header for the proof + /// * `storage_proof` - Storage proof for the transfer + /// + /// # Returns + /// The generated proof and nullifier. + Future generateProof({ + required WormholeUtxo utxo, + required ProofOutputAssignment output, + required int feeBps, + required BlockHeaderData blockHeader, + required StorageProofData storageProof, + }) => RustLib.instance.api.crateApiWormholeWormholeProofGeneratorGenerateProof( + that: this, + utxo: utxo, + output: output, + feeBps: feeBps, + blockHeader: blockHeader, + storageProof: storageProof, + ); + + // HINT: Make it `#[frb(sync)]` to let it become the default constructor of Dart class. + /// Create a new proof generator from circuit files. + /// + /// # Arguments + /// * `bins_dir` - Path to directory containing prover.bin and common.bin + /// + /// # Returns + /// A new proof generator instance. + static Future newInstance({required String binsDir}) => + RustLib.instance.api.crateApiWormholeWormholeProofGeneratorNew(binsDir: binsDir); + + @override + int get hashCode => binsDir.hashCode; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is WormholeProofGenerator && runtimeType == other.runtimeType && binsDir == other.binsDir; +} + +/// A wormhole UTXO (unspent transaction output) - FFI-friendly version. +/// +/// Represents an unspent wormhole deposit that can be used as input +/// for generating a proof. +class WormholeUtxo { + /// The secret used to derive the wormhole address (hex encoded with 0x prefix). + final String secretHex; + + /// Amount in planck (12 decimal places). + final BigInt amount; + + /// Transfer count from the NativeTransferred event. + final BigInt transferCount; + + /// The funding account (sender of the original transfer) - hex encoded. 
+ final String fundingAccountHex; + + /// Block hash where the transfer was recorded - hex encoded. + final String blockHashHex; + + const WormholeUtxo({ + required this.secretHex, + required this.amount, + required this.transferCount, + required this.fundingAccountHex, + required this.blockHashHex, + }); + + @override + int get hashCode => + secretHex.hashCode ^ + amount.hashCode ^ + transferCount.hashCode ^ + fundingAccountHex.hashCode ^ + blockHashHex.hashCode; + + @override + bool operator ==(Object other) => + identical(this, other) || + other is WormholeUtxo && + runtimeType == other.runtimeType && + secretHex == other.secretHex && + amount == other.amount && + transferCount == other.transferCount && + fundingAccountHex == other.fundingAccountHex && + blockHashHex == other.blockHashHex; +} diff --git a/quantus_sdk/lib/src/rust/frb_generated.dart b/quantus_sdk/lib/src/rust/frb_generated.dart index f36ec283..58124260 100644 --- a/quantus_sdk/lib/src/rust/frb_generated.dart +++ b/quantus_sdk/lib/src/rust/frb_generated.dart @@ -5,6 +5,7 @@ import 'api/crypto.dart'; import 'api/ur.dart'; +import 'api/wormhole.dart'; import 'dart:async'; import 'dart:convert'; import 'frb_generated.dart'; @@ -63,7 +64,7 @@ class RustLib extends BaseEntrypoint { String get codegenVersion => '2.11.1'; @override - int get rustContentHash => 1692591137; + int get rustContentHash => 1665864519; static const kDefaultExternalLibraryLoaderConfig = ExternalLibraryLoaderConfig( stem: 'rust_lib_resonance_network_wallet', @@ -73,6 +74,48 @@ class RustLib extends BaseEntrypoint { } abstract class RustLibApi extends BaseApi { + Future crateApiWormholeWormholeProofAggregatorAddProof({ + required WormholeProofAggregator that, + required String proofHex, + }); + + Future crateApiWormholeWormholeProofAggregatorAggregate({required WormholeProofAggregator that}); + + Future crateApiWormholeWormholeProofAggregatorBatchSize({required WormholeProofAggregator that}); + + Future 
crateApiWormholeWormholeProofAggregatorClear({required WormholeProofAggregator that}); + + Future crateApiWormholeWormholeProofAggregatorNew({required String binsDir}); + + Future crateApiWormholeWormholeProofAggregatorProofCount({required WormholeProofAggregator that}); + + bool crateApiWormholeCheckCircuitBinariesExist({required String binsDir}); + + Future crateApiWormholeCircuitConfigLoad({required String binsDir}); + + String crateApiWormholeComputeBlockHash({ + required String parentHashHex, + required String stateRootHex, + required String extrinsicsRootHex, + required int blockNumber, + required String digestHex, + }); + + String crateApiWormholeComputeNullifier({required String secretHex, required BigInt transferCount}); + + int crateApiWormholeComputeOutputAmount({required int inputAmount, required int feeBps}); + + String crateApiWormholeComputeTransferProofStorageKey({ + required String secretHex, + required BigInt transferCount, + required String fundingAccount, + required BigInt amount, + }); + + Future crateApiWormholeCreateProofAggregator({required String binsDir}); + + Future crateApiWormholeCreateProofGenerator({required String binsDir}); + Keypair crateApiCryptoCrystalAlice(); Keypair crateApiCryptoCrystalBob(); @@ -81,22 +124,47 @@ abstract class RustLibApi extends BaseApi { Uint8List crateApiUrDecodeUr({required List urParts}); + BigInt crateApiWormholeDequantizeAmount({required int quantizedAmount}); + + String crateApiWormholeDeriveAddressFromSecret({required String secretHex}); + Uint8List crateApiCryptoDeriveHdPath({required List seed, required String path}); + WormholePairResult crateApiWormholeDeriveWormholePair({ + required String mnemonic, + required int purpose, + required int index, + }); + + String crateApiWormholeEncodeDigestFromRpcLogs({required List logsHex}); + List crateApiUrEncodeUr({required List data}); + String crateApiWormholeFirstHashToAddress({required String firstHashHex}); + + Future 
crateApiWormholeGenerateCircuitBinaries({ + required String outputDir, + required int numLeafProofs, + }); + Keypair crateApiCryptoGenerateDerivedKeypair({required String mnemonicStr, required String path}); Keypair crateApiCryptoGenerateKeypair({required String mnemonicStr}); Keypair crateApiCryptoGenerateKeypairFromSeed({required List seed}); + BigInt crateApiWormholeGetAggregationBatchSize({required String binsDir}); + + String crateApiWormholeGetWormholeDerivationPath({required int purpose, required int index}); + Future crateApiCryptoInitApp(); bool crateApiUrIsCompleteUr({required List urParts}); BigInt crateApiCryptoPublicKeyBytes(); + int crateApiWormholeQuantizeAmount({required BigInt amountPlanck}); + BigInt crateApiCryptoSecretKeyBytes(); void crateApiCryptoSetDefaultSs58Prefix({required int prefix}); @@ -121,11 +189,30 @@ abstract class RustLibApi extends BaseApi { required List signature, }); + String crateApiWormholeWormholeErrorToDisplayString({required WormholeError that}); + + Future crateApiWormholeWormholeProofGeneratorGenerateProof({ + required WormholeProofGenerator that, + required WormholeUtxo utxo, + required ProofOutputAssignment output, + required int feeBps, + required BlockHeaderData blockHeader, + required StorageProofData storageProof, + }); + + Future crateApiWormholeWormholeProofGeneratorNew({required String binsDir}); + RustArcIncrementStrongCountFnType get rust_arc_increment_strong_count_HdLatticeError; RustArcDecrementStrongCountFnType get rust_arc_decrement_strong_count_HdLatticeError; CrossPlatformFinalizerArg get rust_arc_decrement_strong_count_HdLatticeErrorPtr; + + RustArcIncrementStrongCountFnType get rust_arc_increment_strong_count_WormholeProofAggregator; + + RustArcDecrementStrongCountFnType get rust_arc_decrement_strong_count_WormholeProofAggregator; + + CrossPlatformFinalizerArg get rust_arc_decrement_strong_count_WormholeProofAggregatorPtr; } class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { 
@@ -136,13 +223,347 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { required super.portManager, }); + @override + Future crateApiWormholeWormholeProofAggregatorAddProof({ + required WormholeProofAggregator that, + required String proofHex, + }) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + that, + serializer, + ); + sse_encode_String(proofHex, serializer); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 1, port: port_); + }, + codec: SseCodec(decodeSuccessData: sse_decode_unit, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeWormholeProofAggregatorAddProofConstMeta, + argValues: [that, proofHex], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeWormholeProofAggregatorAddProofConstMeta => + const TaskConstMeta(debugName: 'WormholeProofAggregator_add_proof', argNames: ['that', 'proofHex']); + + @override + Future crateApiWormholeWormholeProofAggregatorAggregate({required WormholeProofAggregator that}) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + that, + serializer, + ); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 2, port: port_); + }, + codec: SseCodec(decodeSuccessData: sse_decode_aggregated_proof, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeWormholeProofAggregatorAggregateConstMeta, + argValues: [that], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeWormholeProofAggregatorAggregateConstMeta => + const TaskConstMeta(debugName: 'WormholeProofAggregator_aggregate', argNames: ['that']); + + @override + Future 
crateApiWormholeWormholeProofAggregatorBatchSize({required WormholeProofAggregator that}) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + that, + serializer, + ); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 3, port: port_); + }, + codec: SseCodec(decodeSuccessData: sse_decode_usize, decodeErrorData: null), + constMeta: kCrateApiWormholeWormholeProofAggregatorBatchSizeConstMeta, + argValues: [that], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeWormholeProofAggregatorBatchSizeConstMeta => + const TaskConstMeta(debugName: 'WormholeProofAggregator_batch_size', argNames: ['that']); + + @override + Future crateApiWormholeWormholeProofAggregatorClear({required WormholeProofAggregator that}) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + that, + serializer, + ); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 4, port: port_); + }, + codec: SseCodec(decodeSuccessData: sse_decode_unit, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeWormholeProofAggregatorClearConstMeta, + argValues: [that], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeWormholeProofAggregatorClearConstMeta => + const TaskConstMeta(debugName: 'WormholeProofAggregator_clear', argNames: ['that']); + + @override + Future crateApiWormholeWormholeProofAggregatorNew({required String binsDir}) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(binsDir, serializer); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 5, 
port: port_); + }, + codec: SseCodec( + decodeSuccessData: + sse_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator, + decodeErrorData: sse_decode_wormhole_error, + ), + constMeta: kCrateApiWormholeWormholeProofAggregatorNewConstMeta, + argValues: [binsDir], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeWormholeProofAggregatorNewConstMeta => + const TaskConstMeta(debugName: 'WormholeProofAggregator_new', argNames: ['binsDir']); + + @override + Future crateApiWormholeWormholeProofAggregatorProofCount({required WormholeProofAggregator that}) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + that, + serializer, + ); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 6, port: port_); + }, + codec: SseCodec(decodeSuccessData: sse_decode_usize, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeWormholeProofAggregatorProofCountConstMeta, + argValues: [that], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeWormholeProofAggregatorProofCountConstMeta => + const TaskConstMeta(debugName: 'WormholeProofAggregator_proof_count', argNames: ['that']); + + @override + bool crateApiWormholeCheckCircuitBinariesExist({required String binsDir}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(binsDir, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 7)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_bool, decodeErrorData: null), + constMeta: kCrateApiWormholeCheckCircuitBinariesExistConstMeta, + argValues: [binsDir], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeCheckCircuitBinariesExistConstMeta => + const 
TaskConstMeta(debugName: 'check_circuit_binaries_exist', argNames: ['binsDir']); + + @override + Future crateApiWormholeCircuitConfigLoad({required String binsDir}) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(binsDir, serializer); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 8, port: port_); + }, + codec: SseCodec(decodeSuccessData: sse_decode_circuit_config, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeCircuitConfigLoadConstMeta, + argValues: [binsDir], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeCircuitConfigLoadConstMeta => + const TaskConstMeta(debugName: 'circuit_config_load', argNames: ['binsDir']); + + @override + String crateApiWormholeComputeBlockHash({ + required String parentHashHex, + required String stateRootHex, + required String extrinsicsRootHex, + required int blockNumber, + required String digestHex, + }) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(parentHashHex, serializer); + sse_encode_String(stateRootHex, serializer); + sse_encode_String(extrinsicsRootHex, serializer); + sse_encode_u_32(blockNumber, serializer); + sse_encode_String(digestHex, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 9)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_String, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeComputeBlockHashConstMeta, + argValues: [parentHashHex, stateRootHex, extrinsicsRootHex, blockNumber, digestHex], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeComputeBlockHashConstMeta => const TaskConstMeta( + debugName: 'compute_block_hash', + argNames: ['parentHashHex', 'stateRootHex', 'extrinsicsRootHex', 'blockNumber', 'digestHex'], + ); + + @override + String 
crateApiWormholeComputeNullifier({required String secretHex, required BigInt transferCount}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(secretHex, serializer); + sse_encode_u_64(transferCount, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 10)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_String, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeComputeNullifierConstMeta, + argValues: [secretHex, transferCount], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeComputeNullifierConstMeta => + const TaskConstMeta(debugName: 'compute_nullifier', argNames: ['secretHex', 'transferCount']); + + @override + int crateApiWormholeComputeOutputAmount({required int inputAmount, required int feeBps}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_u_32(inputAmount, serializer); + sse_encode_u_32(feeBps, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 11)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_u_32, decodeErrorData: null), + constMeta: kCrateApiWormholeComputeOutputAmountConstMeta, + argValues: [inputAmount, feeBps], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeComputeOutputAmountConstMeta => + const TaskConstMeta(debugName: 'compute_output_amount', argNames: ['inputAmount', 'feeBps']); + + @override + String crateApiWormholeComputeTransferProofStorageKey({ + required String secretHex, + required BigInt transferCount, + required String fundingAccount, + required BigInt amount, + }) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(secretHex, serializer); + sse_encode_u_64(transferCount, serializer); + sse_encode_String(fundingAccount, 
serializer); + sse_encode_u_64(amount, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 12)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_String, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeComputeTransferProofStorageKeyConstMeta, + argValues: [secretHex, transferCount, fundingAccount, amount], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeComputeTransferProofStorageKeyConstMeta => const TaskConstMeta( + debugName: 'compute_transfer_proof_storage_key', + argNames: ['secretHex', 'transferCount', 'fundingAccount', 'amount'], + ); + + @override + Future crateApiWormholeCreateProofAggregator({required String binsDir}) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(binsDir, serializer); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 13, port: port_); + }, + codec: SseCodec( + decodeSuccessData: + sse_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator, + decodeErrorData: sse_decode_wormhole_error, + ), + constMeta: kCrateApiWormholeCreateProofAggregatorConstMeta, + argValues: [binsDir], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeCreateProofAggregatorConstMeta => + const TaskConstMeta(debugName: 'create_proof_aggregator', argNames: ['binsDir']); + + @override + Future crateApiWormholeCreateProofGenerator({required String binsDir}) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(binsDir, serializer); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 14, port: port_); + }, + codec: SseCodec( + decodeSuccessData: sse_decode_wormhole_proof_generator, + decodeErrorData: sse_decode_wormhole_error, + ), + constMeta: kCrateApiWormholeCreateProofGeneratorConstMeta, + argValues: 
[binsDir], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeCreateProofGeneratorConstMeta => + const TaskConstMeta(debugName: 'create_proof_generator', argNames: ['binsDir']); + @override Keypair crateApiCryptoCrystalAlice() { return handler.executeSync( SyncTask( callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 1)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 15)!; }, codec: SseCodec(decodeSuccessData: sse_decode_keypair, decodeErrorData: null), constMeta: kCrateApiCryptoCrystalAliceConstMeta, @@ -161,7 +582,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { SyncTask( callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 2)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 16)!; }, codec: SseCodec(decodeSuccessData: sse_decode_keypair, decodeErrorData: null), constMeta: kCrateApiCryptoCrystalBobConstMeta, @@ -179,7 +600,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { SyncTask( callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 3)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 17)!; }, codec: SseCodec(decodeSuccessData: sse_decode_keypair, decodeErrorData: null), constMeta: kCrateApiCryptoCrystalCharlieConstMeta, @@ -199,57 +620,187 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); sse_encode_list_String(urParts, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 4)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 18)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_list_prim_u_8_strict, 
decodeErrorData: sse_decode_String), + constMeta: kCrateApiUrDecodeUrConstMeta, + argValues: [urParts], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiUrDecodeUrConstMeta => const TaskConstMeta(debugName: 'decode_ur', argNames: ['urParts']); + + @override + BigInt crateApiWormholeDequantizeAmount({required int quantizedAmount}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_u_32(quantizedAmount, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 19)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_u_64, decodeErrorData: null), + constMeta: kCrateApiWormholeDequantizeAmountConstMeta, + argValues: [quantizedAmount], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeDequantizeAmountConstMeta => + const TaskConstMeta(debugName: 'dequantize_amount', argNames: ['quantizedAmount']); + + @override + String crateApiWormholeDeriveAddressFromSecret({required String secretHex}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(secretHex, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 20)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_String, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeDeriveAddressFromSecretConstMeta, + argValues: [secretHex], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeDeriveAddressFromSecretConstMeta => + const TaskConstMeta(debugName: 'derive_address_from_secret', argNames: ['secretHex']); + + @override + Uint8List crateApiCryptoDeriveHdPath({required List seed, required String path}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_list_prim_u_8_loose(seed, serializer); + sse_encode_String(path, serializer); + return 
pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 21)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_list_prim_u_8_strict, decodeErrorData: null), + constMeta: kCrateApiCryptoDeriveHdPathConstMeta, + argValues: [seed, path], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiCryptoDeriveHdPathConstMeta => + const TaskConstMeta(debugName: 'derive_hd_path', argNames: ['seed', 'path']); + + @override + WormholePairResult crateApiWormholeDeriveWormholePair({ + required String mnemonic, + required int purpose, + required int index, + }) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(mnemonic, serializer); + sse_encode_u_32(purpose, serializer); + sse_encode_u_32(index, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 22)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_wormhole_pair_result, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeDeriveWormholePairConstMeta, + argValues: [mnemonic, purpose, index], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeDeriveWormholePairConstMeta => + const TaskConstMeta(debugName: 'derive_wormhole_pair', argNames: ['mnemonic', 'purpose', 'index']); + + @override + String crateApiWormholeEncodeDigestFromRpcLogs({required List logsHex}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_list_String(logsHex, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 23)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_String, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeEncodeDigestFromRpcLogsConstMeta, + argValues: [logsHex], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeEncodeDigestFromRpcLogsConstMeta => + const TaskConstMeta(debugName: 'encode_digest_from_rpc_logs', 
argNames: ['logsHex']); + + @override + List crateApiUrEncodeUr({required List data}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_list_prim_u_8_loose(data, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 24)!; }, - codec: SseCodec(decodeSuccessData: sse_decode_list_prim_u_8_strict, decodeErrorData: sse_decode_String), - constMeta: kCrateApiUrDecodeUrConstMeta, - argValues: [urParts], + codec: SseCodec(decodeSuccessData: sse_decode_list_String, decodeErrorData: sse_decode_String), + constMeta: kCrateApiUrEncodeUrConstMeta, + argValues: [data], apiImpl: this, ), ); } - TaskConstMeta get kCrateApiUrDecodeUrConstMeta => const TaskConstMeta(debugName: 'decode_ur', argNames: ['urParts']); + TaskConstMeta get kCrateApiUrEncodeUrConstMeta => const TaskConstMeta(debugName: 'encode_ur', argNames: ['data']); @override - Uint8List crateApiCryptoDeriveHdPath({required List seed, required String path}) { + String crateApiWormholeFirstHashToAddress({required String firstHashHex}) { return handler.executeSync( SyncTask( callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); - sse_encode_list_prim_u_8_loose(seed, serializer); - sse_encode_String(path, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 5)!; + sse_encode_String(firstHashHex, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 25)!; }, - codec: SseCodec(decodeSuccessData: sse_decode_list_prim_u_8_strict, decodeErrorData: null), - constMeta: kCrateApiCryptoDeriveHdPathConstMeta, - argValues: [seed, path], + codec: SseCodec(decodeSuccessData: sse_decode_String, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeFirstHashToAddressConstMeta, + argValues: [firstHashHex], apiImpl: this, ), ); } - TaskConstMeta get kCrateApiCryptoDeriveHdPathConstMeta => - const TaskConstMeta(debugName: 'derive_hd_path', 
argNames: ['seed', 'path']); + TaskConstMeta get kCrateApiWormholeFirstHashToAddressConstMeta => + const TaskConstMeta(debugName: 'first_hash_to_address', argNames: ['firstHashHex']); @override - List crateApiUrEncodeUr({required List data}) { - return handler.executeSync( - SyncTask( - callFfi: () { + Future crateApiWormholeGenerateCircuitBinaries({ + required String outputDir, + required int numLeafProofs, + }) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { final serializer = SseSerializer(generalizedFrbRustBinding); - sse_encode_list_prim_u_8_loose(data, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 6)!; + sse_encode_String(outputDir, serializer); + sse_encode_u_32(numLeafProofs, serializer); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 26, port: port_); }, - codec: SseCodec(decodeSuccessData: sse_decode_list_String, decodeErrorData: sse_decode_String), - constMeta: kCrateApiUrEncodeUrConstMeta, - argValues: [data], + codec: SseCodec(decodeSuccessData: sse_decode_circuit_generation_result, decodeErrorData: null), + constMeta: kCrateApiWormholeGenerateCircuitBinariesConstMeta, + argValues: [outputDir, numLeafProofs], apiImpl: this, ), ); } - TaskConstMeta get kCrateApiUrEncodeUrConstMeta => const TaskConstMeta(debugName: 'encode_ur', argNames: ['data']); + TaskConstMeta get kCrateApiWormholeGenerateCircuitBinariesConstMeta => + const TaskConstMeta(debugName: 'generate_circuit_binaries', argNames: ['outputDir', 'numLeafProofs']); @override Keypair crateApiCryptoGenerateDerivedKeypair({required String mnemonicStr, required String path}) { @@ -259,7 +810,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { final serializer = SseSerializer(generalizedFrbRustBinding); sse_encode_String(mnemonicStr, serializer); sse_encode_String(path, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 7)!; + return pdeCallFfi(generalizedFrbRustBinding, 
serializer, funcId: 27)!; }, codec: SseCodec( decodeSuccessData: sse_decode_keypair, @@ -283,7 +834,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); sse_encode_String(mnemonicStr, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 8)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 28)!; }, codec: SseCodec(decodeSuccessData: sse_decode_keypair, decodeErrorData: null), constMeta: kCrateApiCryptoGenerateKeypairConstMeta, @@ -303,7 +854,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); sse_encode_list_prim_u_8_loose(seed, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 9)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 29)!; }, codec: SseCodec(decodeSuccessData: sse_decode_keypair, decodeErrorData: null), constMeta: kCrateApiCryptoGenerateKeypairFromSeedConstMeta, @@ -316,13 +867,54 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { TaskConstMeta get kCrateApiCryptoGenerateKeypairFromSeedConstMeta => const TaskConstMeta(debugName: 'generate_keypair_from_seed', argNames: ['seed']); + @override + BigInt crateApiWormholeGetAggregationBatchSize({required String binsDir}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(binsDir, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 30)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_usize, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeGetAggregationBatchSizeConstMeta, + argValues: [binsDir], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeGetAggregationBatchSizeConstMeta => + const TaskConstMeta(debugName: 
'get_aggregation_batch_size', argNames: ['binsDir']); + + @override + String crateApiWormholeGetWormholeDerivationPath({required int purpose, required int index}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_u_32(purpose, serializer); + sse_encode_u_32(index, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 31)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_String, decodeErrorData: null), + constMeta: kCrateApiWormholeGetWormholeDerivationPathConstMeta, + argValues: [purpose, index], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeGetWormholeDerivationPathConstMeta => + const TaskConstMeta(debugName: 'get_wormhole_derivation_path', argNames: ['purpose', 'index']); + @override Future crateApiCryptoInitApp() { return handler.executeNormal( NormalTask( callFfi: (port_) { final serializer = SseSerializer(generalizedFrbRustBinding); - pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 10, port: port_); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 32, port: port_); }, codec: SseCodec(decodeSuccessData: sse_decode_unit, decodeErrorData: null), constMeta: kCrateApiCryptoInitAppConstMeta, @@ -341,7 +933,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); sse_encode_list_String(urParts, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 11)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 33)!; }, codec: SseCodec(decodeSuccessData: sse_decode_bool, decodeErrorData: null), constMeta: kCrateApiUrIsCompleteUrConstMeta, @@ -360,7 +952,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { SyncTask( callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); - return pdeCallFfi(generalizedFrbRustBinding, serializer, 
funcId: 12)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 34)!; }, codec: SseCodec(decodeSuccessData: sse_decode_usize, decodeErrorData: null), constMeta: kCrateApiCryptoPublicKeyBytesConstMeta, @@ -373,13 +965,33 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { TaskConstMeta get kCrateApiCryptoPublicKeyBytesConstMeta => const TaskConstMeta(debugName: 'public_key_bytes', argNames: []); + @override + int crateApiWormholeQuantizeAmount({required BigInt amountPlanck}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_u_64(amountPlanck, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 35)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_u_32, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeQuantizeAmountConstMeta, + argValues: [amountPlanck], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeQuantizeAmountConstMeta => + const TaskConstMeta(debugName: 'quantize_amount', argNames: ['amountPlanck']); + @override BigInt crateApiCryptoSecretKeyBytes() { return handler.executeSync( SyncTask( callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 13)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 36)!; }, codec: SseCodec(decodeSuccessData: sse_decode_usize, decodeErrorData: null), constMeta: kCrateApiCryptoSecretKeyBytesConstMeta, @@ -399,7 +1011,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); sse_encode_u_16(prefix, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 14)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 37)!; }, codec: SseCodec(decodeSuccessData: sse_decode_unit, decodeErrorData: 
null), constMeta: kCrateApiCryptoSetDefaultSs58PrefixConstMeta, @@ -421,7 +1033,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { sse_encode_box_autoadd_keypair(keypair, serializer); sse_encode_list_prim_u_8_loose(message, serializer); sse_encode_opt_u_8_array_32(entropy, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 15)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 38)!; }, codec: SseCodec(decodeSuccessData: sse_decode_list_prim_u_8_strict, decodeErrorData: null), constMeta: kCrateApiCryptoSignMessageConstMeta, @@ -447,7 +1059,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { sse_encode_box_autoadd_keypair(keypair, serializer); sse_encode_list_prim_u_8_loose(message, serializer); sse_encode_opt_u_8_array_32(entropy, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 16)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 39)!; }, codec: SseCodec(decodeSuccessData: sse_decode_list_prim_u_8_strict, decodeErrorData: null), constMeta: kCrateApiCryptoSignMessageWithPubkeyConstMeta, @@ -466,7 +1078,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { SyncTask( callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 17)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 40)!; }, codec: SseCodec(decodeSuccessData: sse_decode_usize, decodeErrorData: null), constMeta: kCrateApiCryptoSignatureBytesConstMeta, @@ -486,7 +1098,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); sse_encode_String(s, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 18)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 41)!; }, codec: 
SseCodec(decodeSuccessData: sse_decode_list_prim_u_8_strict, decodeErrorData: null), constMeta: kCrateApiCryptoSs58ToAccountIdConstMeta, @@ -506,7 +1118,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { callFfi: () { final serializer = SseSerializer(generalizedFrbRustBinding); sse_encode_box_autoadd_keypair(obj, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 19)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 42)!; }, codec: SseCodec(decodeSuccessData: sse_decode_String, decodeErrorData: null), constMeta: kCrateApiCryptoToAccountIdConstMeta, @@ -532,7 +1144,7 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { sse_encode_box_autoadd_keypair(keypair, serializer); sse_encode_list_prim_u_8_loose(message, serializer); sse_encode_list_prim_u_8_loose(signature, serializer); - return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 20)!; + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 43)!; }, codec: SseCodec(decodeSuccessData: sse_decode_bool, decodeErrorData: null), constMeta: kCrateApiCryptoVerifyMessageConstMeta, @@ -545,12 +1157,95 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { TaskConstMeta get kCrateApiCryptoVerifyMessageConstMeta => const TaskConstMeta(debugName: 'verify_message', argNames: ['keypair', 'message', 'signature']); + @override + String crateApiWormholeWormholeErrorToDisplayString({required WormholeError that}) { + return handler.executeSync( + SyncTask( + callFfi: () { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_box_autoadd_wormhole_error(that, serializer); + return pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 44)!; + }, + codec: SseCodec(decodeSuccessData: sse_decode_String, decodeErrorData: null), + constMeta: kCrateApiWormholeWormholeErrorToDisplayStringConstMeta, + argValues: [that], + apiImpl: this, + ), + ); + } + + TaskConstMeta 
get kCrateApiWormholeWormholeErrorToDisplayStringConstMeta => + const TaskConstMeta(debugName: 'wormhole_error_to_display_string(dart_style=toString)', argNames: ['that']); + + @override + Future crateApiWormholeWormholeProofGeneratorGenerateProof({ + required WormholeProofGenerator that, + required WormholeUtxo utxo, + required ProofOutputAssignment output, + required int feeBps, + required BlockHeaderData blockHeader, + required StorageProofData storageProof, + }) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_box_autoadd_wormhole_proof_generator(that, serializer); + sse_encode_box_autoadd_wormhole_utxo(utxo, serializer); + sse_encode_box_autoadd_proof_output_assignment(output, serializer); + sse_encode_u_32(feeBps, serializer); + sse_encode_box_autoadd_block_header_data(blockHeader, serializer); + sse_encode_box_autoadd_storage_proof_data(storageProof, serializer); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 45, port: port_); + }, + codec: SseCodec(decodeSuccessData: sse_decode_generated_proof, decodeErrorData: sse_decode_wormhole_error), + constMeta: kCrateApiWormholeWormholeProofGeneratorGenerateProofConstMeta, + argValues: [that, utxo, output, feeBps, blockHeader, storageProof], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeWormholeProofGeneratorGenerateProofConstMeta => const TaskConstMeta( + debugName: 'wormhole_proof_generator_generate_proof', + argNames: ['that', 'utxo', 'output', 'feeBps', 'blockHeader', 'storageProof'], + ); + + @override + Future crateApiWormholeWormholeProofGeneratorNew({required String binsDir}) { + return handler.executeNormal( + NormalTask( + callFfi: (port_) { + final serializer = SseSerializer(generalizedFrbRustBinding); + sse_encode_String(binsDir, serializer); + pdeCallFfi(generalizedFrbRustBinding, serializer, funcId: 46, port: port_); + }, + codec: SseCodec( + decodeSuccessData: 
sse_decode_wormhole_proof_generator, + decodeErrorData: sse_decode_wormhole_error, + ), + constMeta: kCrateApiWormholeWormholeProofGeneratorNewConstMeta, + argValues: [binsDir], + apiImpl: this, + ), + ); + } + + TaskConstMeta get kCrateApiWormholeWormholeProofGeneratorNewConstMeta => + const TaskConstMeta(debugName: 'wormhole_proof_generator_new', argNames: ['binsDir']); + RustArcIncrementStrongCountFnType get rust_arc_increment_strong_count_HdLatticeError => wire.rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError; RustArcDecrementStrongCountFnType get rust_arc_decrement_strong_count_HdLatticeError => wire.rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError; + RustArcIncrementStrongCountFnType get rust_arc_increment_strong_count_WormholeProofAggregator => wire + .rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator; + + RustArcDecrementStrongCountFnType get rust_arc_decrement_strong_count_WormholeProofAggregator => wire + .rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator; + @protected HdLatticeError dco_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( dynamic raw, @@ -559,30 +1254,141 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { return HdLatticeErrorImpl.frbInternalDcoDecode(raw as List); } + @protected + WormholeProofAggregator + dco_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + dynamic raw, + ) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return WormholeProofAggregatorImpl.frbInternalDcoDecode(raw as List); + } + + @protected + WormholeProofAggregator + 
dco_decode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + dynamic raw, + ) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return WormholeProofAggregatorImpl.frbInternalDcoDecode(raw as List); + } + @protected HdLatticeError dco_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError(dynamic raw) { // Codec=Dco (DartCObject based), see doc to use other codecs return HdLatticeErrorImpl.frbInternalDcoDecode(raw as List); } + @protected + WormholeProofAggregator + dco_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return WormholeProofAggregatorImpl.frbInternalDcoDecode(raw as List); + } + @protected String dco_decode_String(dynamic raw) { // Codec=Dco (DartCObject based), see doc to use other codecs return raw as String; } + @protected + AggregatedProof dco_decode_aggregated_proof(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + final arr = raw as List; + if (arr.length != 2) throw Exception('unexpected arr length: expect 2 but see ${arr.length}'); + return AggregatedProof(proofHex: dco_decode_String(arr[0]), numRealProofs: dco_decode_usize(arr[1])); + } + + @protected + BlockHeaderData dco_decode_block_header_data(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + final arr = raw as List; + if (arr.length != 5) throw Exception('unexpected arr length: expect 5 but see ${arr.length}'); + return BlockHeaderData( + parentHashHex: dco_decode_String(arr[0]), + stateRootHex: dco_decode_String(arr[1]), + extrinsicsRootHex: dco_decode_String(arr[2]), + blockNumber: dco_decode_u_32(arr[3]), + digestHex: dco_decode_String(arr[4]), + ); + } + @protected bool dco_decode_bool(dynamic raw) { // Codec=Dco (DartCObject based), see doc to use other codecs return raw as bool; } + @protected + 
BlockHeaderData dco_decode_box_autoadd_block_header_data(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return dco_decode_block_header_data(raw); + } + @protected Keypair dco_decode_box_autoadd_keypair(dynamic raw) { // Codec=Dco (DartCObject based), see doc to use other codecs return dco_decode_keypair(raw); } + @protected + ProofOutputAssignment dco_decode_box_autoadd_proof_output_assignment(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return dco_decode_proof_output_assignment(raw); + } + + @protected + StorageProofData dco_decode_box_autoadd_storage_proof_data(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return dco_decode_storage_proof_data(raw); + } + + @protected + WormholeError dco_decode_box_autoadd_wormhole_error(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return dco_decode_wormhole_error(raw); + } + + @protected + WormholeProofGenerator dco_decode_box_autoadd_wormhole_proof_generator(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return dco_decode_wormhole_proof_generator(raw); + } + + @protected + WormholeUtxo dco_decode_box_autoadd_wormhole_utxo(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return dco_decode_wormhole_utxo(raw); + } + + @protected + CircuitConfig dco_decode_circuit_config(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + final arr = raw as List; + if (arr.length != 1) throw Exception('unexpected arr length: expect 1 but see ${arr.length}'); + return CircuitConfig(numLeafProofs: dco_decode_usize(arr[0])); + } + + @protected + CircuitGenerationResult dco_decode_circuit_generation_result(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + final arr = raw as List; + if (arr.length != 3) throw Exception('unexpected arr length: expect 3 but see ${arr.length}'); + return 
CircuitGenerationResult( + success: dco_decode_bool(arr[0]), + error: dco_decode_opt_String(arr[1]), + outputDir: dco_decode_opt_String(arr[2]), + ); + } + + @protected + GeneratedProof dco_decode_generated_proof(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + final arr = raw as List; + if (arr.length != 2) throw Exception('unexpected arr length: expect 2 but see ${arr.length}'); + return GeneratedProof(proofHex: dco_decode_String(arr[0]), nullifierHex: dco_decode_String(arr[1])); + } + @protected Keypair dco_decode_keypair(dynamic raw) { // Codec=Dco (DartCObject based), see doc to use other codecs @@ -601,55 +1407,170 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { } @protected - List dco_decode_list_prim_u_8_loose(dynamic raw) { + List dco_decode_list_prim_u_8_loose(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return raw as List; + } + + @protected + Uint8List dco_decode_list_prim_u_8_strict(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return raw as Uint8List; + } + + @protected + String? dco_decode_opt_String(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return raw == null ? null : dco_decode_String(raw); + } + + @protected + U8Array32? dco_decode_opt_u_8_array_32(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return raw == null ? 
null : dco_decode_u_8_array_32(raw); + } + + @protected + ProofOutputAssignment dco_decode_proof_output_assignment(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + final arr = raw as List; + if (arr.length != 4) throw Exception('unexpected arr length: expect 4 but see ${arr.length}'); + return ProofOutputAssignment( + outputAmount1: dco_decode_u_32(arr[0]), + exitAccount1: dco_decode_String(arr[1]), + outputAmount2: dco_decode_u_32(arr[2]), + exitAccount2: dco_decode_String(arr[3]), + ); + } + + @protected + StorageProofData dco_decode_storage_proof_data(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + final arr = raw as List; + if (arr.length != 2) throw Exception('unexpected arr length: expect 2 but see ${arr.length}'); + return StorageProofData(proofNodesHex: dco_decode_list_String(arr[0]), stateRootHex: dco_decode_String(arr[1])); + } + + @protected + int dco_decode_u_16(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return raw as int; + } + + @protected + int dco_decode_u_32(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return raw as int; + } + + @protected + BigInt dco_decode_u_64(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return dcoDecodeU64(raw); + } + + @protected + int dco_decode_u_8(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return raw as int; + } + + @protected + U8Array32 dco_decode_u_8_array_32(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return U8Array32(dco_decode_list_prim_u_8_strict(raw)); + } + + @protected + void dco_decode_unit(dynamic raw) { + // Codec=Dco (DartCObject based), see doc to use other codecs + return; + } + + @protected + BigInt dco_decode_usize(dynamic raw) { // Codec=Dco (DartCObject based), see doc to use other codecs - return raw as List; + return dcoDecodeU64(raw); } @protected - 
Uint8List dco_decode_list_prim_u_8_strict(dynamic raw) { + WormholeError dco_decode_wormhole_error(dynamic raw) { // Codec=Dco (DartCObject based), see doc to use other codecs - return raw as Uint8List; + final arr = raw as List; + if (arr.length != 1) throw Exception('unexpected arr length: expect 1 but see ${arr.length}'); + return WormholeError(message: dco_decode_String(arr[0])); } @protected - U8Array32? dco_decode_opt_u_8_array_32(dynamic raw) { + WormholePairResult dco_decode_wormhole_pair_result(dynamic raw) { // Codec=Dco (DartCObject based), see doc to use other codecs - return raw == null ? null : dco_decode_u_8_array_32(raw); + final arr = raw as List; + if (arr.length != 5) throw Exception('unexpected arr length: expect 5 but see ${arr.length}'); + return WormholePairResult( + address: dco_decode_String(arr[0]), + addressHex: dco_decode_String(arr[1]), + firstHashSs58: dco_decode_String(arr[2]), + firstHashHex: dco_decode_String(arr[3]), + secretHex: dco_decode_String(arr[4]), + ); } @protected - int dco_decode_u_16(dynamic raw) { + WormholeProofGenerator dco_decode_wormhole_proof_generator(dynamic raw) { // Codec=Dco (DartCObject based), see doc to use other codecs - return raw as int; + final arr = raw as List; + if (arr.length != 1) throw Exception('unexpected arr length: expect 1 but see ${arr.length}'); + return WormholeProofGenerator(binsDir: dco_decode_String(arr[0])); } @protected - int dco_decode_u_8(dynamic raw) { + WormholeUtxo dco_decode_wormhole_utxo(dynamic raw) { // Codec=Dco (DartCObject based), see doc to use other codecs - return raw as int; + final arr = raw as List; + if (arr.length != 5) throw Exception('unexpected arr length: expect 5 but see ${arr.length}'); + return WormholeUtxo( + secretHex: dco_decode_String(arr[0]), + amount: dco_decode_u_64(arr[1]), + transferCount: dco_decode_u_64(arr[2]), + fundingAccountHex: dco_decode_String(arr[3]), + blockHashHex: dco_decode_String(arr[4]), + ); } @protected - U8Array32 
dco_decode_u_8_array_32(dynamic raw) { - // Codec=Dco (DartCObject based), see doc to use other codecs - return U8Array32(dco_decode_list_prim_u_8_strict(raw)); + HdLatticeError sse_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( + SseDeserializer deserializer, + ) { + // Codec=Sse (Serialization based), see doc to use other codecs + return HdLatticeErrorImpl.frbInternalSseDecode(sse_decode_usize(deserializer), sse_decode_i_32(deserializer)); } @protected - void dco_decode_unit(dynamic raw) { - // Codec=Dco (DartCObject based), see doc to use other codecs - return; + WormholeProofAggregator + sse_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + SseDeserializer deserializer, + ) { + // Codec=Sse (Serialization based), see doc to use other codecs + return WormholeProofAggregatorImpl.frbInternalSseDecode( + sse_decode_usize(deserializer), + sse_decode_i_32(deserializer), + ); } @protected - BigInt dco_decode_usize(dynamic raw) { - // Codec=Dco (DartCObject based), see doc to use other codecs - return dcoDecodeU64(raw); + WormholeProofAggregator + sse_decode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + SseDeserializer deserializer, + ) { + // Codec=Sse (Serialization based), see doc to use other codecs + return WormholeProofAggregatorImpl.frbInternalSseDecode( + sse_decode_usize(deserializer), + sse_decode_i_32(deserializer), + ); } @protected - HdLatticeError sse_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( + HdLatticeError sse_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( SseDeserializer deserializer, ) { // Codec=Sse (Serialization based), see doc to use other codecs @@ -657,11 +1578,15 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { } @protected - HdLatticeError 
sse_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( + WormholeProofAggregator + sse_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( SseDeserializer deserializer, ) { // Codec=Sse (Serialization based), see doc to use other codecs - return HdLatticeErrorImpl.frbInternalSseDecode(sse_decode_usize(deserializer), sse_decode_i_32(deserializer)); + return WormholeProofAggregatorImpl.frbInternalSseDecode( + sse_decode_usize(deserializer), + sse_decode_i_32(deserializer), + ); } @protected @@ -671,18 +1596,103 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { return utf8.decoder.convert(inner); } + @protected + AggregatedProof sse_decode_aggregated_proof(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + var var_proofHex = sse_decode_String(deserializer); + var var_numRealProofs = sse_decode_usize(deserializer); + return AggregatedProof(proofHex: var_proofHex, numRealProofs: var_numRealProofs); + } + + @protected + BlockHeaderData sse_decode_block_header_data(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + var var_parentHashHex = sse_decode_String(deserializer); + var var_stateRootHex = sse_decode_String(deserializer); + var var_extrinsicsRootHex = sse_decode_String(deserializer); + var var_blockNumber = sse_decode_u_32(deserializer); + var var_digestHex = sse_decode_String(deserializer); + return BlockHeaderData( + parentHashHex: var_parentHashHex, + stateRootHex: var_stateRootHex, + extrinsicsRootHex: var_extrinsicsRootHex, + blockNumber: var_blockNumber, + digestHex: var_digestHex, + ); + } + @protected bool sse_decode_bool(SseDeserializer deserializer) { // Codec=Sse (Serialization based), see doc to use other codecs return deserializer.buffer.getUint8() != 0; } + @protected + BlockHeaderData sse_decode_box_autoadd_block_header_data(SseDeserializer 
deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + return (sse_decode_block_header_data(deserializer)); + } + @protected Keypair sse_decode_box_autoadd_keypair(SseDeserializer deserializer) { // Codec=Sse (Serialization based), see doc to use other codecs return (sse_decode_keypair(deserializer)); } + @protected + ProofOutputAssignment sse_decode_box_autoadd_proof_output_assignment(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + return (sse_decode_proof_output_assignment(deserializer)); + } + + @protected + StorageProofData sse_decode_box_autoadd_storage_proof_data(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + return (sse_decode_storage_proof_data(deserializer)); + } + + @protected + WormholeError sse_decode_box_autoadd_wormhole_error(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + return (sse_decode_wormhole_error(deserializer)); + } + + @protected + WormholeProofGenerator sse_decode_box_autoadd_wormhole_proof_generator(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + return (sse_decode_wormhole_proof_generator(deserializer)); + } + + @protected + WormholeUtxo sse_decode_box_autoadd_wormhole_utxo(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + return (sse_decode_wormhole_utxo(deserializer)); + } + + @protected + CircuitConfig sse_decode_circuit_config(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + var var_numLeafProofs = sse_decode_usize(deserializer); + return CircuitConfig(numLeafProofs: var_numLeafProofs); + } + + @protected + CircuitGenerationResult sse_decode_circuit_generation_result(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + var var_success = 
sse_decode_bool(deserializer); + var var_error = sse_decode_opt_String(deserializer); + var var_outputDir = sse_decode_opt_String(deserializer); + return CircuitGenerationResult(success: var_success, error: var_error, outputDir: var_outputDir); + } + + @protected + GeneratedProof sse_decode_generated_proof(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + var var_proofHex = sse_decode_String(deserializer); + var var_nullifierHex = sse_decode_String(deserializer); + return GeneratedProof(proofHex: var_proofHex, nullifierHex: var_nullifierHex); + } + @protected Keypair sse_decode_keypair(SseDeserializer deserializer) { // Codec=Sse (Serialization based), see doc to use other codecs @@ -717,6 +1727,17 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { return deserializer.buffer.getUint8List(len_); } + @protected + String? sse_decode_opt_String(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + + if (sse_decode_bool(deserializer)) { + return (sse_decode_String(deserializer)); + } else { + return null; + } + } + @protected U8Array32? 
sse_decode_opt_u_8_array_32(SseDeserializer deserializer) { // Codec=Sse (Serialization based), see doc to use other codecs @@ -728,12 +1749,47 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { } } + @protected + ProofOutputAssignment sse_decode_proof_output_assignment(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + var var_outputAmount1 = sse_decode_u_32(deserializer); + var var_exitAccount1 = sse_decode_String(deserializer); + var var_outputAmount2 = sse_decode_u_32(deserializer); + var var_exitAccount2 = sse_decode_String(deserializer); + return ProofOutputAssignment( + outputAmount1: var_outputAmount1, + exitAccount1: var_exitAccount1, + outputAmount2: var_outputAmount2, + exitAccount2: var_exitAccount2, + ); + } + + @protected + StorageProofData sse_decode_storage_proof_data(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + var var_proofNodesHex = sse_decode_list_String(deserializer); + var var_stateRootHex = sse_decode_String(deserializer); + return StorageProofData(proofNodesHex: var_proofNodesHex, stateRootHex: var_stateRootHex); + } + @protected int sse_decode_u_16(SseDeserializer deserializer) { // Codec=Sse (Serialization based), see doc to use other codecs return deserializer.buffer.getUint16(); } + @protected + int sse_decode_u_32(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + return deserializer.buffer.getUint32(); + } + + @protected + BigInt sse_decode_u_64(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + return deserializer.buffer.getBigUint64(); + } + @protected int sse_decode_u_8(SseDeserializer deserializer) { // Codec=Sse (Serialization based), see doc to use other codecs @@ -758,6 +1814,54 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { return deserializer.buffer.getBigUint64(); } + 
@protected + WormholeError sse_decode_wormhole_error(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + var var_message = sse_decode_String(deserializer); + return WormholeError(message: var_message); + } + + @protected + WormholePairResult sse_decode_wormhole_pair_result(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + var var_address = sse_decode_String(deserializer); + var var_addressHex = sse_decode_String(deserializer); + var var_firstHashSs58 = sse_decode_String(deserializer); + var var_firstHashHex = sse_decode_String(deserializer); + var var_secretHex = sse_decode_String(deserializer); + return WormholePairResult( + address: var_address, + addressHex: var_addressHex, + firstHashSs58: var_firstHashSs58, + firstHashHex: var_firstHashHex, + secretHex: var_secretHex, + ); + } + + @protected + WormholeProofGenerator sse_decode_wormhole_proof_generator(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + var var_binsDir = sse_decode_String(deserializer); + return WormholeProofGenerator(binsDir: var_binsDir); + } + + @protected + WormholeUtxo sse_decode_wormhole_utxo(SseDeserializer deserializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + var var_secretHex = sse_decode_String(deserializer); + var var_amount = sse_decode_u_64(deserializer); + var var_transferCount = sse_decode_u_64(deserializer); + var var_fundingAccountHex = sse_decode_String(deserializer); + var var_blockHashHex = sse_decode_String(deserializer); + return WormholeUtxo( + secretHex: var_secretHex, + amount: var_amount, + transferCount: var_transferCount, + fundingAccountHex: var_fundingAccountHex, + blockHashHex: var_blockHashHex, + ); + } + @protected int sse_decode_i_32(SseDeserializer deserializer) { // Codec=Sse (Serialization based), see doc to use other codecs @@ -773,6 +1877,24 @@ class RustLibApiImpl extends 
RustLibApiImplPlatform implements RustLibApi { sse_encode_usize((self as HdLatticeErrorImpl).frbInternalSseEncode(move: true), serializer); } + @protected + void sse_encode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + WormholeProofAggregator self, + SseSerializer serializer, + ) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_usize((self as WormholeProofAggregatorImpl).frbInternalSseEncode(move: true), serializer); + } + + @protected + void sse_encode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + WormholeProofAggregator self, + SseSerializer serializer, + ) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_usize((self as WormholeProofAggregatorImpl).frbInternalSseEncode(move: false), serializer); + } + @protected void sse_encode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( HdLatticeError self, @@ -782,24 +1904,107 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { sse_encode_usize((self as HdLatticeErrorImpl).frbInternalSseEncode(move: null), serializer); } + @protected + void sse_encode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + WormholeProofAggregator self, + SseSerializer serializer, + ) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_usize((self as WormholeProofAggregatorImpl).frbInternalSseEncode(move: null), serializer); + } + @protected void sse_encode_String(String self, SseSerializer serializer) { // Codec=Sse (Serialization based), see doc to use other codecs sse_encode_list_prim_u_8_strict(utf8.encoder.convert(self), serializer); } + @protected + void sse_encode_aggregated_proof(AggregatedProof self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_String(self.proofHex, serializer); + 
sse_encode_usize(self.numRealProofs, serializer); + } + + @protected + void sse_encode_block_header_data(BlockHeaderData self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_String(self.parentHashHex, serializer); + sse_encode_String(self.stateRootHex, serializer); + sse_encode_String(self.extrinsicsRootHex, serializer); + sse_encode_u_32(self.blockNumber, serializer); + sse_encode_String(self.digestHex, serializer); + } + @protected void sse_encode_bool(bool self, SseSerializer serializer) { // Codec=Sse (Serialization based), see doc to use other codecs serializer.buffer.putUint8(self ? 1 : 0); } + @protected + void sse_encode_box_autoadd_block_header_data(BlockHeaderData self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_block_header_data(self, serializer); + } + @protected void sse_encode_box_autoadd_keypair(Keypair self, SseSerializer serializer) { // Codec=Sse (Serialization based), see doc to use other codecs sse_encode_keypair(self, serializer); } + @protected + void sse_encode_box_autoadd_proof_output_assignment(ProofOutputAssignment self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_proof_output_assignment(self, serializer); + } + + @protected + void sse_encode_box_autoadd_storage_proof_data(StorageProofData self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_storage_proof_data(self, serializer); + } + + @protected + void sse_encode_box_autoadd_wormhole_error(WormholeError self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_wormhole_error(self, serializer); + } + + @protected + void sse_encode_box_autoadd_wormhole_proof_generator(WormholeProofGenerator self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + 
sse_encode_wormhole_proof_generator(self, serializer); + } + + @protected + void sse_encode_box_autoadd_wormhole_utxo(WormholeUtxo self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_wormhole_utxo(self, serializer); + } + + @protected + void sse_encode_circuit_config(CircuitConfig self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_usize(self.numLeafProofs, serializer); + } + + @protected + void sse_encode_circuit_generation_result(CircuitGenerationResult self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_bool(self.success, serializer); + sse_encode_opt_String(self.error, serializer); + sse_encode_opt_String(self.outputDir, serializer); + } + + @protected + void sse_encode_generated_proof(GeneratedProof self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_String(self.proofHex, serializer); + sse_encode_String(self.nullifierHex, serializer); + } + @protected void sse_encode_keypair(Keypair self, SseSerializer serializer) { // Codec=Sse (Serialization based), see doc to use other codecs @@ -830,6 +2035,16 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { serializer.buffer.putUint8List(self); } + @protected + void sse_encode_opt_String(String? self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + + sse_encode_bool(self != null, serializer); + if (self != null) { + sse_encode_String(self, serializer); + } + } + @protected void sse_encode_opt_u_8_array_32(U8Array32? 
self, SseSerializer serializer) { // Codec=Sse (Serialization based), see doc to use other codecs @@ -840,12 +2055,40 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { } } + @protected + void sse_encode_proof_output_assignment(ProofOutputAssignment self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_u_32(self.outputAmount1, serializer); + sse_encode_String(self.exitAccount1, serializer); + sse_encode_u_32(self.outputAmount2, serializer); + sse_encode_String(self.exitAccount2, serializer); + } + + @protected + void sse_encode_storage_proof_data(StorageProofData self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_list_String(self.proofNodesHex, serializer); + sse_encode_String(self.stateRootHex, serializer); + } + @protected void sse_encode_u_16(int self, SseSerializer serializer) { // Codec=Sse (Serialization based), see doc to use other codecs serializer.buffer.putUint16(self); } + @protected + void sse_encode_u_32(int self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + serializer.buffer.putUint32(self); + } + + @protected + void sse_encode_u_64(BigInt self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + serializer.buffer.putBigUint64(self); + } + @protected void sse_encode_u_8(int self, SseSerializer serializer) { // Codec=Sse (Serialization based), see doc to use other codecs @@ -869,6 +2112,38 @@ class RustLibApiImpl extends RustLibApiImplPlatform implements RustLibApi { serializer.buffer.putBigUint64(self); } + @protected + void sse_encode_wormhole_error(WormholeError self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_String(self.message, serializer); + } + + @protected + void sse_encode_wormhole_pair_result(WormholePairResult self, SseSerializer 
serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_String(self.address, serializer); + sse_encode_String(self.addressHex, serializer); + sse_encode_String(self.firstHashSs58, serializer); + sse_encode_String(self.firstHashHex, serializer); + sse_encode_String(self.secretHex, serializer); + } + + @protected + void sse_encode_wormhole_proof_generator(WormholeProofGenerator self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_String(self.binsDir, serializer); + } + + @protected + void sse_encode_wormhole_utxo(WormholeUtxo self, SseSerializer serializer) { + // Codec=Sse (Serialization based), see doc to use other codecs + sse_encode_String(self.secretHex, serializer); + sse_encode_u_64(self.amount, serializer); + sse_encode_u_64(self.transferCount, serializer); + sse_encode_String(self.fundingAccountHex, serializer); + sse_encode_String(self.blockHashHex, serializer); + } + @protected void sse_encode_i_32(int self, SseSerializer serializer) { // Codec=Sse (Serialization based), see doc to use other codecs @@ -891,3 +2166,45 @@ class HdLatticeErrorImpl extends RustOpaque implements HdLatticeError { rustArcDecrementStrongCountPtr: RustLib.instance.api.rust_arc_decrement_strong_count_HdLatticeErrorPtr, ); } + +@sealed +class WormholeProofAggregatorImpl extends RustOpaque implements WormholeProofAggregator { + // Not to be used by end users + WormholeProofAggregatorImpl.frbInternalDcoDecode(List wire) : super.frbInternalDcoDecode(wire, _kStaticData); + + // Not to be used by end users + WormholeProofAggregatorImpl.frbInternalSseDecode(BigInt ptr, int externalSizeOnNative) + : super.frbInternalSseDecode(ptr, externalSizeOnNative, _kStaticData); + + static final _kStaticData = RustArcStaticData( + rustArcIncrementStrongCount: RustLib.instance.api.rust_arc_increment_strong_count_WormholeProofAggregator, + rustArcDecrementStrongCount: 
RustLib.instance.api.rust_arc_decrement_strong_count_WormholeProofAggregator, + rustArcDecrementStrongCountPtr: RustLib.instance.api.rust_arc_decrement_strong_count_WormholeProofAggregatorPtr, + ); + + /// Add a proof to the aggregation buffer. + /// + /// # Arguments + /// * `proof_hex` - The serialized proof bytes (hex encoded with 0x prefix) + Future addProof({required String proofHex}) => + RustLib.instance.api.crateApiWormholeWormholeProofAggregatorAddProof(that: this, proofHex: proofHex); + + /// Aggregate all proofs in the buffer. + /// + /// If fewer than `batch_size` proofs have been added, the remaining + /// slots are filled with dummy proofs automatically. + /// + /// # Returns + /// The aggregated proof. + Future aggregate() => + RustLib.instance.api.crateApiWormholeWormholeProofAggregatorAggregate(that: this); + + /// Get the batch size (number of proofs per aggregation). + Future batchSize() => RustLib.instance.api.crateApiWormholeWormholeProofAggregatorBatchSize(that: this); + + /// Clear the proof buffer without aggregating. + Future clear() => RustLib.instance.api.crateApiWormholeWormholeProofAggregatorClear(that: this); + + /// Get the number of proofs currently in the buffer. 
+ Future proofCount() => RustLib.instance.api.crateApiWormholeWormholeProofAggregatorProofCount(that: this); +} diff --git a/quantus_sdk/lib/src/rust/frb_generated.io.dart b/quantus_sdk/lib/src/rust/frb_generated.io.dart index 3e6e1fed..7c745ad1 100644 --- a/quantus_sdk/lib/src/rust/frb_generated.io.dart +++ b/quantus_sdk/lib/src/rust/frb_generated.io.dart @@ -5,6 +5,7 @@ import 'api/crypto.dart'; import 'api/ur.dart'; +import 'api/wormhole.dart'; import 'dart:async'; import 'dart:convert'; import 'dart:ffi' as ffi; @@ -22,23 +23,75 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { CrossPlatformFinalizerArg get rust_arc_decrement_strong_count_HdLatticeErrorPtr => wire ._rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeErrorPtr; + CrossPlatformFinalizerArg get rust_arc_decrement_strong_count_WormholeProofAggregatorPtr => wire + ._rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregatorPtr; + @protected HdLatticeError dco_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( dynamic raw, ); + @protected + WormholeProofAggregator + dco_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + dynamic raw, + ); + + @protected + WormholeProofAggregator + dco_decode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + dynamic raw, + ); + @protected HdLatticeError dco_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError(dynamic raw); + @protected + WormholeProofAggregator + dco_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator(dynamic raw); + @protected String dco_decode_String(dynamic raw); + @protected + AggregatedProof dco_decode_aggregated_proof(dynamic raw); + + @protected + BlockHeaderData dco_decode_block_header_data(dynamic raw); + 
@protected bool dco_decode_bool(dynamic raw); + @protected + BlockHeaderData dco_decode_box_autoadd_block_header_data(dynamic raw); + @protected Keypair dco_decode_box_autoadd_keypair(dynamic raw); + @protected + ProofOutputAssignment dco_decode_box_autoadd_proof_output_assignment(dynamic raw); + + @protected + StorageProofData dco_decode_box_autoadd_storage_proof_data(dynamic raw); + + @protected + WormholeError dco_decode_box_autoadd_wormhole_error(dynamic raw); + + @protected + WormholeProofGenerator dco_decode_box_autoadd_wormhole_proof_generator(dynamic raw); + + @protected + WormholeUtxo dco_decode_box_autoadd_wormhole_utxo(dynamic raw); + + @protected + CircuitConfig dco_decode_circuit_config(dynamic raw); + + @protected + CircuitGenerationResult dco_decode_circuit_generation_result(dynamic raw); + + @protected + GeneratedProof dco_decode_generated_proof(dynamic raw); + @protected Keypair dco_decode_keypair(dynamic raw); @@ -51,12 +104,27 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected Uint8List dco_decode_list_prim_u_8_strict(dynamic raw); + @protected + String? dco_decode_opt_String(dynamic raw); + @protected U8Array32? 
dco_decode_opt_u_8_array_32(dynamic raw); + @protected + ProofOutputAssignment dco_decode_proof_output_assignment(dynamic raw); + + @protected + StorageProofData dco_decode_storage_proof_data(dynamic raw); + @protected int dco_decode_u_16(dynamic raw); + @protected + int dco_decode_u_32(dynamic raw); + + @protected + BigInt dco_decode_u_64(dynamic raw); + @protected int dco_decode_u_8(dynamic raw); @@ -69,25 +137,88 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected BigInt dco_decode_usize(dynamic raw); + @protected + WormholeError dco_decode_wormhole_error(dynamic raw); + + @protected + WormholePairResult dco_decode_wormhole_pair_result(dynamic raw); + + @protected + WormholeProofGenerator dco_decode_wormhole_proof_generator(dynamic raw); + + @protected + WormholeUtxo dco_decode_wormhole_utxo(dynamic raw); + @protected HdLatticeError sse_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( SseDeserializer deserializer, ); + @protected + WormholeProofAggregator + sse_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + SseDeserializer deserializer, + ); + + @protected + WormholeProofAggregator + sse_decode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + SseDeserializer deserializer, + ); + @protected HdLatticeError sse_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( SseDeserializer deserializer, ); + @protected + WormholeProofAggregator + sse_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + SseDeserializer deserializer, + ); + @protected String sse_decode_String(SseDeserializer deserializer); + @protected + AggregatedProof sse_decode_aggregated_proof(SseDeserializer deserializer); + + @protected + BlockHeaderData sse_decode_block_header_data(SseDeserializer deserializer); + @protected bool 
sse_decode_bool(SseDeserializer deserializer); + @protected + BlockHeaderData sse_decode_box_autoadd_block_header_data(SseDeserializer deserializer); + @protected Keypair sse_decode_box_autoadd_keypair(SseDeserializer deserializer); + @protected + ProofOutputAssignment sse_decode_box_autoadd_proof_output_assignment(SseDeserializer deserializer); + + @protected + StorageProofData sse_decode_box_autoadd_storage_proof_data(SseDeserializer deserializer); + + @protected + WormholeError sse_decode_box_autoadd_wormhole_error(SseDeserializer deserializer); + + @protected + WormholeProofGenerator sse_decode_box_autoadd_wormhole_proof_generator(SseDeserializer deserializer); + + @protected + WormholeUtxo sse_decode_box_autoadd_wormhole_utxo(SseDeserializer deserializer); + + @protected + CircuitConfig sse_decode_circuit_config(SseDeserializer deserializer); + + @protected + CircuitGenerationResult sse_decode_circuit_generation_result(SseDeserializer deserializer); + + @protected + GeneratedProof sse_decode_generated_proof(SseDeserializer deserializer); + @protected Keypair sse_decode_keypair(SseDeserializer deserializer); @@ -100,12 +231,27 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected Uint8List sse_decode_list_prim_u_8_strict(SseDeserializer deserializer); + @protected + String? sse_decode_opt_String(SseDeserializer deserializer); + @protected U8Array32? 
sse_decode_opt_u_8_array_32(SseDeserializer deserializer); + @protected + ProofOutputAssignment sse_decode_proof_output_assignment(SseDeserializer deserializer); + + @protected + StorageProofData sse_decode_storage_proof_data(SseDeserializer deserializer); + @protected int sse_decode_u_16(SseDeserializer deserializer); + @protected + int sse_decode_u_32(SseDeserializer deserializer); + + @protected + BigInt sse_decode_u_64(SseDeserializer deserializer); + @protected int sse_decode_u_8(SseDeserializer deserializer); @@ -118,6 +264,18 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected BigInt sse_decode_usize(SseDeserializer deserializer); + @protected + WormholeError sse_decode_wormhole_error(SseDeserializer deserializer); + + @protected + WormholePairResult sse_decode_wormhole_pair_result(SseDeserializer deserializer); + + @protected + WormholeProofGenerator sse_decode_wormhole_proof_generator(SseDeserializer deserializer); + + @protected + WormholeUtxo sse_decode_wormhole_utxo(SseDeserializer deserializer); + @protected int sse_decode_i_32(SseDeserializer deserializer); @@ -127,21 +285,72 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { SseSerializer serializer, ); + @protected + void sse_encode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + WormholeProofAggregator self, + SseSerializer serializer, + ); + + @protected + void sse_encode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + WormholeProofAggregator self, + SseSerializer serializer, + ); + @protected void sse_encode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( HdLatticeError self, SseSerializer serializer, ); + @protected + void sse_encode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + WormholeProofAggregator self, + SseSerializer serializer, + ); + @protected void sse_encode_String(String self, 
SseSerializer serializer); + @protected + void sse_encode_aggregated_proof(AggregatedProof self, SseSerializer serializer); + + @protected + void sse_encode_block_header_data(BlockHeaderData self, SseSerializer serializer); + @protected void sse_encode_bool(bool self, SseSerializer serializer); + @protected + void sse_encode_box_autoadd_block_header_data(BlockHeaderData self, SseSerializer serializer); + @protected void sse_encode_box_autoadd_keypair(Keypair self, SseSerializer serializer); + @protected + void sse_encode_box_autoadd_proof_output_assignment(ProofOutputAssignment self, SseSerializer serializer); + + @protected + void sse_encode_box_autoadd_storage_proof_data(StorageProofData self, SseSerializer serializer); + + @protected + void sse_encode_box_autoadd_wormhole_error(WormholeError self, SseSerializer serializer); + + @protected + void sse_encode_box_autoadd_wormhole_proof_generator(WormholeProofGenerator self, SseSerializer serializer); + + @protected + void sse_encode_box_autoadd_wormhole_utxo(WormholeUtxo self, SseSerializer serializer); + + @protected + void sse_encode_circuit_config(CircuitConfig self, SseSerializer serializer); + + @protected + void sse_encode_circuit_generation_result(CircuitGenerationResult self, SseSerializer serializer); + + @protected + void sse_encode_generated_proof(GeneratedProof self, SseSerializer serializer); + @protected void sse_encode_keypair(Keypair self, SseSerializer serializer); @@ -154,12 +363,27 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected void sse_encode_list_prim_u_8_strict(Uint8List self, SseSerializer serializer); + @protected + void sse_encode_opt_String(String? self, SseSerializer serializer); + @protected void sse_encode_opt_u_8_array_32(U8Array32? 
self, SseSerializer serializer); + @protected + void sse_encode_proof_output_assignment(ProofOutputAssignment self, SseSerializer serializer); + + @protected + void sse_encode_storage_proof_data(StorageProofData self, SseSerializer serializer); + @protected void sse_encode_u_16(int self, SseSerializer serializer); + @protected + void sse_encode_u_32(int self, SseSerializer serializer); + + @protected + void sse_encode_u_64(BigInt self, SseSerializer serializer); + @protected void sse_encode_u_8(int self, SseSerializer serializer); @@ -172,6 +396,18 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected void sse_encode_usize(BigInt self, SseSerializer serializer); + @protected + void sse_encode_wormhole_error(WormholeError self, SseSerializer serializer); + + @protected + void sse_encode_wormhole_pair_result(WormholePairResult self, SseSerializer serializer); + + @protected + void sse_encode_wormhole_proof_generator(WormholeProofGenerator self, SseSerializer serializer); + + @protected + void sse_encode_wormhole_utxo(WormholeUtxo self, SseSerializer serializer); + @protected void sse_encode_i_32(int self, SseSerializer serializer); } @@ -218,4 +454,38 @@ class RustLibWire implements BaseWire { late final _rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError = _rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeErrorPtr .asFunction)>(); + + void + rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + ffi.Pointer ptr, + ) { + return _rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + ptr, + ); + } + + late final _rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregatorPtr = + _lookup)>>( + 
'frbgen_quantus_sdk_rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator', + ); + late final _rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator = + _rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregatorPtr + .asFunction)>(); + + void + rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + ffi.Pointer ptr, + ) { + return _rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + ptr, + ); + } + + late final _rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregatorPtr = + _lookup)>>( + 'frbgen_quantus_sdk_rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator', + ); + late final _rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator = + _rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregatorPtr + .asFunction)>(); } diff --git a/quantus_sdk/lib/src/rust/frb_generated.web.dart b/quantus_sdk/lib/src/rust/frb_generated.web.dart index 82ebb48d..4faab012 100644 --- a/quantus_sdk/lib/src/rust/frb_generated.web.dart +++ b/quantus_sdk/lib/src/rust/frb_generated.web.dart @@ -8,6 +8,7 @@ import 'api/crypto.dart'; import 'api/ur.dart'; +import 'api/wormhole.dart'; import 'dart:async'; import 'dart:convert'; import 'frb_generated.dart'; @@ -24,23 +25,75 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { CrossPlatformFinalizerArg get rust_arc_decrement_strong_count_HdLatticeErrorPtr => wire.rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError; + 
CrossPlatformFinalizerArg get rust_arc_decrement_strong_count_WormholeProofAggregatorPtr => wire + .rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator; + @protected HdLatticeError dco_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( dynamic raw, ); + @protected + WormholeProofAggregator + dco_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + dynamic raw, + ); + + @protected + WormholeProofAggregator + dco_decode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + dynamic raw, + ); + @protected HdLatticeError dco_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError(dynamic raw); + @protected + WormholeProofAggregator + dco_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator(dynamic raw); + @protected String dco_decode_String(dynamic raw); + @protected + AggregatedProof dco_decode_aggregated_proof(dynamic raw); + + @protected + BlockHeaderData dco_decode_block_header_data(dynamic raw); + @protected bool dco_decode_bool(dynamic raw); + @protected + BlockHeaderData dco_decode_box_autoadd_block_header_data(dynamic raw); + @protected Keypair dco_decode_box_autoadd_keypair(dynamic raw); + @protected + ProofOutputAssignment dco_decode_box_autoadd_proof_output_assignment(dynamic raw); + + @protected + StorageProofData dco_decode_box_autoadd_storage_proof_data(dynamic raw); + + @protected + WormholeError dco_decode_box_autoadd_wormhole_error(dynamic raw); + + @protected + WormholeProofGenerator dco_decode_box_autoadd_wormhole_proof_generator(dynamic raw); + + @protected + WormholeUtxo dco_decode_box_autoadd_wormhole_utxo(dynamic raw); + + @protected + CircuitConfig dco_decode_circuit_config(dynamic raw); + + @protected + CircuitGenerationResult dco_decode_circuit_generation_result(dynamic 
raw); + + @protected + GeneratedProof dco_decode_generated_proof(dynamic raw); + @protected Keypair dco_decode_keypair(dynamic raw); @@ -53,12 +106,27 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected Uint8List dco_decode_list_prim_u_8_strict(dynamic raw); + @protected + String? dco_decode_opt_String(dynamic raw); + @protected U8Array32? dco_decode_opt_u_8_array_32(dynamic raw); + @protected + ProofOutputAssignment dco_decode_proof_output_assignment(dynamic raw); + + @protected + StorageProofData dco_decode_storage_proof_data(dynamic raw); + @protected int dco_decode_u_16(dynamic raw); + @protected + int dco_decode_u_32(dynamic raw); + + @protected + BigInt dco_decode_u_64(dynamic raw); + @protected int dco_decode_u_8(dynamic raw); @@ -71,25 +139,88 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected BigInt dco_decode_usize(dynamic raw); + @protected + WormholeError dco_decode_wormhole_error(dynamic raw); + + @protected + WormholePairResult dco_decode_wormhole_pair_result(dynamic raw); + + @protected + WormholeProofGenerator dco_decode_wormhole_proof_generator(dynamic raw); + + @protected + WormholeUtxo dco_decode_wormhole_utxo(dynamic raw); + @protected HdLatticeError sse_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( SseDeserializer deserializer, ); + @protected + WormholeProofAggregator + sse_decode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + SseDeserializer deserializer, + ); + + @protected + WormholeProofAggregator + sse_decode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + SseDeserializer deserializer, + ); + @protected HdLatticeError sse_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( SseDeserializer deserializer, ); + @protected + WormholeProofAggregator + 
sse_decode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + SseDeserializer deserializer, + ); + @protected String sse_decode_String(SseDeserializer deserializer); + @protected + AggregatedProof sse_decode_aggregated_proof(SseDeserializer deserializer); + + @protected + BlockHeaderData sse_decode_block_header_data(SseDeserializer deserializer); + @protected bool sse_decode_bool(SseDeserializer deserializer); + @protected + BlockHeaderData sse_decode_box_autoadd_block_header_data(SseDeserializer deserializer); + @protected Keypair sse_decode_box_autoadd_keypair(SseDeserializer deserializer); + @protected + ProofOutputAssignment sse_decode_box_autoadd_proof_output_assignment(SseDeserializer deserializer); + + @protected + StorageProofData sse_decode_box_autoadd_storage_proof_data(SseDeserializer deserializer); + + @protected + WormholeError sse_decode_box_autoadd_wormhole_error(SseDeserializer deserializer); + + @protected + WormholeProofGenerator sse_decode_box_autoadd_wormhole_proof_generator(SseDeserializer deserializer); + + @protected + WormholeUtxo sse_decode_box_autoadd_wormhole_utxo(SseDeserializer deserializer); + + @protected + CircuitConfig sse_decode_circuit_config(SseDeserializer deserializer); + + @protected + CircuitGenerationResult sse_decode_circuit_generation_result(SseDeserializer deserializer); + + @protected + GeneratedProof sse_decode_generated_proof(SseDeserializer deserializer); + @protected Keypair sse_decode_keypair(SseDeserializer deserializer); @@ -102,12 +233,27 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected Uint8List sse_decode_list_prim_u_8_strict(SseDeserializer deserializer); + @protected + String? sse_decode_opt_String(SseDeserializer deserializer); + @protected U8Array32? 
sse_decode_opt_u_8_array_32(SseDeserializer deserializer); + @protected + ProofOutputAssignment sse_decode_proof_output_assignment(SseDeserializer deserializer); + + @protected + StorageProofData sse_decode_storage_proof_data(SseDeserializer deserializer); + @protected int sse_decode_u_16(SseDeserializer deserializer); + @protected + int sse_decode_u_32(SseDeserializer deserializer); + + @protected + BigInt sse_decode_u_64(SseDeserializer deserializer); + @protected int sse_decode_u_8(SseDeserializer deserializer); @@ -120,6 +266,18 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected BigInt sse_decode_usize(SseDeserializer deserializer); + @protected + WormholeError sse_decode_wormhole_error(SseDeserializer deserializer); + + @protected + WormholePairResult sse_decode_wormhole_pair_result(SseDeserializer deserializer); + + @protected + WormholeProofGenerator sse_decode_wormhole_proof_generator(SseDeserializer deserializer); + + @protected + WormholeUtxo sse_decode_wormhole_utxo(SseDeserializer deserializer); + @protected int sse_decode_i_32(SseDeserializer deserializer); @@ -129,21 +287,72 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { SseSerializer serializer, ); + @protected + void sse_encode_Auto_Owned_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + WormholeProofAggregator self, + SseSerializer serializer, + ); + + @protected + void sse_encode_Auto_Ref_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + WormholeProofAggregator self, + SseSerializer serializer, + ); + @protected void sse_encode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( HdLatticeError self, SseSerializer serializer, ); + @protected + void sse_encode_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + WormholeProofAggregator self, + SseSerializer serializer, + ); + @protected void sse_encode_String(String self, 
SseSerializer serializer); + @protected + void sse_encode_aggregated_proof(AggregatedProof self, SseSerializer serializer); + + @protected + void sse_encode_block_header_data(BlockHeaderData self, SseSerializer serializer); + @protected void sse_encode_bool(bool self, SseSerializer serializer); + @protected + void sse_encode_box_autoadd_block_header_data(BlockHeaderData self, SseSerializer serializer); + @protected void sse_encode_box_autoadd_keypair(Keypair self, SseSerializer serializer); + @protected + void sse_encode_box_autoadd_proof_output_assignment(ProofOutputAssignment self, SseSerializer serializer); + + @protected + void sse_encode_box_autoadd_storage_proof_data(StorageProofData self, SseSerializer serializer); + + @protected + void sse_encode_box_autoadd_wormhole_error(WormholeError self, SseSerializer serializer); + + @protected + void sse_encode_box_autoadd_wormhole_proof_generator(WormholeProofGenerator self, SseSerializer serializer); + + @protected + void sse_encode_box_autoadd_wormhole_utxo(WormholeUtxo self, SseSerializer serializer); + + @protected + void sse_encode_circuit_config(CircuitConfig self, SseSerializer serializer); + + @protected + void sse_encode_circuit_generation_result(CircuitGenerationResult self, SseSerializer serializer); + + @protected + void sse_encode_generated_proof(GeneratedProof self, SseSerializer serializer); + @protected void sse_encode_keypair(Keypair self, SseSerializer serializer); @@ -156,12 +365,27 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected void sse_encode_list_prim_u_8_strict(Uint8List self, SseSerializer serializer); + @protected + void sse_encode_opt_String(String? self, SseSerializer serializer); + @protected void sse_encode_opt_u_8_array_32(U8Array32? 
self, SseSerializer serializer); + @protected + void sse_encode_proof_output_assignment(ProofOutputAssignment self, SseSerializer serializer); + + @protected + void sse_encode_storage_proof_data(StorageProofData self, SseSerializer serializer); + @protected void sse_encode_u_16(int self, SseSerializer serializer); + @protected + void sse_encode_u_32(int self, SseSerializer serializer); + + @protected + void sse_encode_u_64(BigInt self, SseSerializer serializer); + @protected void sse_encode_u_8(int self, SseSerializer serializer); @@ -174,6 +398,18 @@ abstract class RustLibApiImplPlatform extends BaseApiImpl { @protected void sse_encode_usize(BigInt self, SseSerializer serializer); + @protected + void sse_encode_wormhole_error(WormholeError self, SseSerializer serializer); + + @protected + void sse_encode_wormhole_pair_result(WormholePairResult self, SseSerializer serializer); + + @protected + void sse_encode_wormhole_proof_generator(WormholeProofGenerator self, SseSerializer serializer); + + @protected + void sse_encode_wormhole_utxo(WormholeUtxo self, SseSerializer serializer); + @protected void sse_encode_i_32(int self, SseSerializer serializer); } @@ -196,6 +432,22 @@ class RustLibWire implements BaseWire { .rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError( ptr, ); + + void + rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + int ptr, + ) => wasmModule + .rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + ptr, + ); + + void + rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + int ptr, + ) => wasmModule + .rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + ptr, + ); } @JS('wasm_bindgen') @@ -209,4 +461,14 @@ extension type 
RustLibWasmModule._(JSObject _) implements JSObject { external void rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerHDLatticeError(int ptr); + + external void + rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + int ptr, + ); + + external void + rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + int ptr, + ); } diff --git a/quantus_sdk/lib/src/services/balances_service.dart b/quantus_sdk/lib/src/services/balances_service.dart index 1fbec932..6a09a4b9 100644 --- a/quantus_sdk/lib/src/services/balances_service.dart +++ b/quantus_sdk/lib/src/services/balances_service.dart @@ -1,8 +1,8 @@ import 'dart:async'; import 'dart:typed_data'; -import 'package:quantus_sdk/generated/schrodinger/schrodinger.dart'; -import 'package:quantus_sdk/generated/schrodinger/types/sp_runtime/multiaddress/multi_address.dart' as multi_address; +import 'package:quantus_sdk/generated/planck/planck.dart'; +import 'package:quantus_sdk/generated/planck/types/sp_runtime/multiaddress/multi_address.dart' as multi_address; import 'package:quantus_sdk/quantus_sdk.dart'; import 'package:quantus_sdk/src/rust/api/crypto.dart' as crypto; @@ -38,14 +38,14 @@ class BalancesService { } Balances getBalanceTransferCall(String targetAddress, BigInt amount) { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final multiDest = const multi_address.$MultiAddress().id(crypto.ss58ToAccountId(s: targetAddress)); final runtimeCall = quantusApi.tx.balances.transferAllowDeath(dest: multiDest, value: amount); return runtimeCall; } Balances getTransferAllCall(String targetAddress, {bool keepAlive = false}) { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final multiDest = const 
multi_address.$MultiAddress().id(crypto.ss58ToAccountId(s: targetAddress)); final runtimeCall = quantusApi.tx.balances.transferAll(dest: multiDest, keepAlive: keepAlive); return runtimeCall; diff --git a/quantus_sdk/lib/src/services/circuit_manager.dart b/quantus_sdk/lib/src/services/circuit_manager.dart new file mode 100644 index 00000000..e7eb7a2e --- /dev/null +++ b/quantus_sdk/lib/src/services/circuit_manager.dart @@ -0,0 +1,181 @@ +import 'dart:convert'; +import 'dart:io'; + +import 'package:flutter/services.dart' show rootBundle; +import 'package:path/path.dart' as path; +import 'package:path_provider/path_provider.dart'; + +/// Progress callback for circuit extraction operations. +typedef CircuitProgressCallback = void Function(double progress, String message); + +/// Information about circuit binary status. +class CircuitStatus { + final bool isAvailable; + final String? circuitDir; + final int? totalSizeBytes; + final String? version; + + /// Number of leaf proofs per aggregation batch (read from config.json) + final int? numLeafProofs; + + const CircuitStatus({ + required this.isAvailable, + this.circuitDir, + this.totalSizeBytes, + this.version, + this.numLeafProofs, + }); + + static const unavailable = CircuitStatus(isAvailable: false); +} + +/// Manages circuit binary files for ZK proof generation. +/// +/// Circuit binaries (~163MB) are bundled with the SDK in assets/circuits/ +/// and extracted to the app's support directory on first use. +class CircuitManager { + // Circuit files required for proof generation (bundled in SDK assets/circuits/) + static const List requiredFiles = [ + 'prover.bin', + 'common.bin', + 'verifier.bin', + 'dummy_proof.bin', + 'aggregated_common.bin', + 'aggregated_verifier.bin', + 'config.json', + ]; + + // Asset path prefix for SDK package assets + static const String _assetPrefix = 'packages/quantus_sdk/assets/circuits'; + + /// Get the directory where extracted circuit files are stored. 
+ /// Uses the app's support directory for persistent storage. + static Future getCircuitDirectory() async { + final appDir = await getApplicationSupportDirectory(); + return path.join(appDir.path, 'circuits'); + } + + /// Check if circuit files are available (extracted from assets). + Future checkStatus() async { + try { + final circuitDir = await getCircuitDirectory(); + final dir = Directory(circuitDir); + + if (!await dir.exists()) { + return CircuitStatus.unavailable; + } + + // Check all required files exist + int totalSize = 0; + for (final fileName in requiredFiles) { + final file = File(path.join(circuitDir, fileName)); + if (!await file.exists()) { + return CircuitStatus.unavailable; + } + totalSize += await file.length(); + } + + // Read config from config.json + String? version; + int? numLeafProofs; + try { + final configFile = File(path.join(circuitDir, 'config.json')); + if (await configFile.exists()) { + final content = await configFile.readAsString(); + final config = jsonDecode(content) as Map; + version = config['version'] as String?; + numLeafProofs = config['num_leaf_proofs'] as int?; + } + } catch (e) { + // Ignore config read errors + } + + return CircuitStatus( + isAvailable: true, + circuitDir: circuitDir, + totalSizeBytes: totalSize, + version: version, + numLeafProofs: numLeafProofs, + ); + } catch (e) { + return CircuitStatus.unavailable; + } + } + + /// Extract bundled circuit files from SDK assets to the filesystem. + /// + /// This is required because the Rust FFI code needs file paths to access + /// the circuit binaries. Flutter assets cannot be accessed via file paths + /// directly, so we extract them to the app's support directory. + /// + /// This is a fast operation (~10 seconds) since we're just copying files, + /// not generating circuits. + Future extractCircuitsFromAssets({CircuitProgressCallback? 
onProgress}) async { + try { + final circuitDir = await getCircuitDirectory(); + final dir = Directory(circuitDir); + + // Create directory if needed + if (!await dir.exists()) { + await dir.create(recursive: true); + } + + onProgress?.call(0.0, 'Extracting circuit files...'); + + int extracted = 0; + for (final fileName in requiredFiles) { + final progress = extracted / requiredFiles.length; + onProgress?.call(progress, 'Extracting $fileName...'); + + try { + // Load from bundled SDK assets + final assetPath = '$_assetPrefix/$fileName'; + final byteData = await rootBundle.load(assetPath); + + // Write to filesystem + final targetFile = File(path.join(circuitDir, fileName)); + await targetFile.writeAsBytes( + byteData.buffer.asUint8List(byteData.offsetInBytes, byteData.lengthInBytes), + flush: true, + ); + } catch (e) { + // Clean up on failure + await deleteCircuits(); + onProgress?.call(0.0, 'Failed to extract $fileName'); + return false; + } + + extracted++; + } + + onProgress?.call(1.0, 'Circuit files ready!'); + return true; + } catch (e) { + onProgress?.call(0.0, 'Extraction failed: $e'); + return false; + } + } + + /// Delete extracted circuit files (for cleanup or re-extraction). + Future deleteCircuits() async { + try { + final circuitDir = await getCircuitDirectory(); + final dir = Directory(circuitDir); + if (await dir.exists()) { + await dir.delete(recursive: true); + } + } catch (e) { + // Ignore deletion errors + } + } + + /// Get human-readable size string. 
+ static String formatBytes(int bytes) { + if (bytes < 1024) return '$bytes B'; + if (bytes < 1024 * 1024) return '${(bytes / 1024).toStringAsFixed(1)} KB'; + if (bytes < 1024 * 1024 * 1024) { + return '${(bytes / (1024 * 1024)).toStringAsFixed(1)} MB'; + } + return '${(bytes / (1024 * 1024 * 1024)).toStringAsFixed(1)} GB'; + } +} diff --git a/quantus_sdk/lib/src/services/high_security_service.dart b/quantus_sdk/lib/src/services/high_security_service.dart index 52f50092..58b6618d 100644 --- a/quantus_sdk/lib/src/services/high_security_service.dart +++ b/quantus_sdk/lib/src/services/high_security_service.dart @@ -2,8 +2,8 @@ import 'dart:async'; import 'dart:typed_data'; import 'package:collection/collection.dart'; -import 'package:quantus_sdk/generated/schrodinger/schrodinger.dart'; -import 'package:quantus_sdk/generated/schrodinger/types/qp_scheduler/block_number_or_timestamp.dart' as qp; +import 'package:quantus_sdk/generated/planck/planck.dart'; +import 'package:quantus_sdk/generated/planck/types/qp_scheduler/block_number_or_timestamp.dart' as qp; import 'package:quantus_sdk/quantus_sdk.dart'; import 'package:quantus_sdk/src/extensions/address_extension.dart'; import 'package:quantus_sdk/src/extensions/duration_extension.dart'; @@ -119,7 +119,7 @@ class HighSecurityService { final recoveryService = RecoveryService(); final balancesService = BalancesService(); - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); // 1. Initiate recovery (rescuer = guardian) calls.add(recoveryService.getInitiateRecoveryCall(lostAccountAddress)); diff --git a/quantus_sdk/lib/src/services/mnemonic_provider.dart b/quantus_sdk/lib/src/services/mnemonic_provider.dart new file mode 100644 index 00000000..6f5f22cc --- /dev/null +++ b/quantus_sdk/lib/src/services/mnemonic_provider.dart @@ -0,0 +1,26 @@ +/// Abstract interface for providing mnemonic phrases. 
+/// +/// This allows different apps to provide mnemonics from different sources: +/// - Miner app: from secure storage with rewards preimage file +/// - Mobile app: from secure storage with biometric protection +/// - Tests: from memory +abstract class MnemonicProvider { + /// Get the mnemonic phrase, or null if not available. + Future getMnemonic(); + + /// Check if a mnemonic is available without retrieving it. + Future hasMnemonic(); +} + +/// Simple in-memory mnemonic provider for testing. +class InMemoryMnemonicProvider implements MnemonicProvider { + final String? _mnemonic; + + const InMemoryMnemonicProvider(this._mnemonic); + + @override + Future getMnemonic() async => _mnemonic; + + @override + Future hasMnemonic() async => _mnemonic != null; +} diff --git a/quantus_sdk/lib/src/services/network/redundant_endpoint.dart b/quantus_sdk/lib/src/services/network/redundant_endpoint.dart index 84ea5464..9d451bcb 100644 --- a/quantus_sdk/lib/src/services/network/redundant_endpoint.dart +++ b/quantus_sdk/lib/src/services/network/redundant_endpoint.dart @@ -22,6 +22,19 @@ class GraphQlEndpointService extends RedundantEndpointService { GraphQlEndpointService._internal() : super(endpoints: AppConstants.graphQlEndpoints.map((e) => Endpoint(url: e)).toList()); + + /// Override the endpoints with custom URLs. + /// Useful for local development or connecting to different chains. + void setEndpoints(List urls) { + endpoints.clear(); + endpoints.addAll(urls.map((e) => Endpoint(url: e))); + } + + /// Reset to default endpoints from AppConstants. 
+ void resetToDefaults() { + endpoints.clear(); + endpoints.addAll(AppConstants.graphQlEndpoints.map((e) => Endpoint(url: e))); + } } class RpcEndpointService extends RedundantEndpointService { @@ -31,11 +44,32 @@ class RpcEndpointService extends RedundantEndpointService { RpcEndpointService._internal() : super(endpoints: AppConstants.rpcEndpoints.map((e) => Endpoint(url: e)).toList()); - String get bestEndpointUrl => endpoints.first.url; + String get bestEndpointUrl { + if (endpoints.isEmpty) { + throw StateError( + 'RpcEndpointService has no endpoints configured. ' + 'Call setEndpoints() first or check AppConstants.rpcEndpoints.', + ); + } + return endpoints.first.url; + } Future rpcTask(Future Function(Uri uri) task) async { return _executeTask((url) => task(Uri.parse(url))); } + + /// Override the endpoints with custom URLs. + /// Useful for local development or connecting to different chains. + void setEndpoints(List urls) { + endpoints.clear(); + endpoints.addAll(urls.map((e) => Endpoint(url: e))); + } + + /// Reset to default endpoints from AppConstants. 
+ void resetToDefaults() { + endpoints.clear(); + endpoints.addAll(AppConstants.rpcEndpoints.map((e) => Endpoint(url: e))); + } } class RedundantEndpointService { diff --git a/quantus_sdk/lib/src/services/recovery_service.dart b/quantus_sdk/lib/src/services/recovery_service.dart index 3d366245..86ed9c1d 100644 --- a/quantus_sdk/lib/src/services/recovery_service.dart +++ b/quantus_sdk/lib/src/services/recovery_service.dart @@ -1,10 +1,10 @@ import 'dart:async'; import 'dart:typed_data'; -import 'package:quantus_sdk/generated/schrodinger/schrodinger.dart'; -import 'package:quantus_sdk/generated/schrodinger/types/pallet_recovery/active_recovery.dart'; -import 'package:quantus_sdk/generated/schrodinger/types/pallet_recovery/recovery_config.dart'; -import 'package:quantus_sdk/generated/schrodinger/types/sp_runtime/multiaddress/multi_address.dart' as multi_address; +import 'package:quantus_sdk/generated/planck/planck.dart'; +import 'package:quantus_sdk/generated/planck/types/pallet_recovery/active_recovery.dart'; +import 'package:quantus_sdk/generated/planck/types/pallet_recovery/recovery_config.dart'; +import 'package:quantus_sdk/generated/planck/types/sp_runtime/multiaddress/multi_address.dart' as multi_address; import 'package:quantus_sdk/quantus_sdk.dart'; import 'package:quantus_sdk/src/rust/api/crypto.dart' as crypto; @@ -16,7 +16,7 @@ class RecoveryService { final SubstrateService _substrateService = SubstrateService(); - final dummyQuantusApi = Schrodinger.url(Uri.parse(AppConstants.rpcEndpoints[0])); + final dummyQuantusApi = Planck.url(Uri.parse(AppConstants.rpcEndpoints[0])); late final BigInt configDepositBase = dummyQuantusApi.constant.recovery.configDepositBase; late final BigInt friendDepositFactor = dummyQuantusApi.constant.recovery.friendDepositFactor; late final int maxFriends = dummyQuantusApi.constant.recovery.maxFriends; @@ -31,7 +31,7 @@ class RecoveryService { required int delayPeriod, }) async { try { - final quantusApi = 
Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final friends = friendAddresses.map((addr) => crypto.ss58ToAccountId(s: addr)).toList(); // Create the call @@ -61,7 +61,7 @@ class RecoveryService { } RuntimeCall getInitiateRecoveryCall(String lostAccountAddress) { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final lostAccount = const multi_address.$MultiAddress().id(crypto.ss58ToAccountId(s: lostAccountAddress)); return quantusApi.tx.recovery.initiateRecovery(account: lostAccount); } @@ -83,7 +83,7 @@ class RecoveryService { } RuntimeCall getVouchRecoveryCall(String lostAccountAddress, String rescuerAddress) { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final lostAccount = const multi_address.$MultiAddress().id(crypto.ss58ToAccountId(s: lostAccountAddress)); final rescuer = const multi_address.$MultiAddress().id(crypto.ss58ToAccountId(s: rescuerAddress)); return quantusApi.tx.recovery.vouchRecovery(lost: lostAccount, rescuer: rescuer); @@ -102,7 +102,7 @@ class RecoveryService { } RuntimeCall getClaimRecoveryCall(String lostAccountAddress) { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final lostAccount = const multi_address.$MultiAddress().id(crypto.ss58ToAccountId(s: lostAccountAddress)); return quantusApi.tx.recovery.claimRecovery(account: lostAccount); } @@ -110,7 +110,7 @@ class RecoveryService { /// Close an active recovery process (called by the lost account owner) Future closeRecovery({required Account lostAccount, required String rescuerAddress}) async { try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final rescuer = const multi_address.$MultiAddress().id(crypto.ss58ToAccountId(s: rescuerAddress)); // 
Create the call @@ -126,7 +126,7 @@ class RecoveryService { /// Remove recovery configuration from account Future removeRecoveryConfig({required Account senderAccount}) async { try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); // Create the call final call = quantusApi.tx.recovery.removeRecovery(); @@ -155,7 +155,7 @@ class RecoveryService { } RuntimeCall getAsRecoveredCall(String recoveredAccountAddress, RuntimeCall call) { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final recoveredAccount = const multi_address.$MultiAddress().id(crypto.ss58ToAccountId(s: recoveredAccountAddress)); return quantusApi.tx.recovery.asRecovered(account: recoveredAccount, call: call); } @@ -163,7 +163,7 @@ class RecoveryService { /// Cancel the ability to use a recovered account Future cancelRecovered({required Account rescuerAccount, required String recoveredAccountAddress}) async { try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final recoveredAccount = const multi_address.$MultiAddress().id( crypto.ss58ToAccountId(s: recoveredAccountAddress), ); @@ -181,7 +181,7 @@ class RecoveryService { /// Query recovery configuration for an account Future getRecoveryConfig(String address) async { try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final accountId = crypto.ss58ToAccountId(s: address); return await quantusApi.query.recovery.recoverable(accountId); @@ -193,7 +193,7 @@ class RecoveryService { /// Query active recovery process Future getActiveRecovery(String lostAccountAddress, String rescuerAddress) async { try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final lostAccountId = crypto.ss58ToAccountId(s: 
lostAccountAddress); final rescuerId = crypto.ss58ToAccountId(s: rescuerAddress); @@ -206,7 +206,7 @@ class RecoveryService { /// Check if an account can act as proxy for a recovered account Future getProxyRecoveredAccount(String proxyAddress) async { try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final proxyId = crypto.ss58ToAccountId(s: proxyAddress); final recoveredAccountId = await quantusApi.query.recovery.proxy(proxyId); @@ -264,7 +264,7 @@ class RecoveryService { /// Get recovery constants Future> getConstants() async { try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final constants = quantusApi.constant.recovery; return { @@ -280,7 +280,7 @@ class RecoveryService { /// Helper to create a balance transfer call for recovered account Balances createBalanceTransferCall(String recipientAddress, BigInt amount) { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final accountID = crypto.ss58ToAccountId(s: recipientAddress); final dest = const multi_address.$MultiAddress().id(accountID); final call = quantusApi.tx.balances.transferAllowDeath(dest: dest, value: amount); diff --git a/quantus_sdk/lib/src/services/reversible_transfers_service.dart b/quantus_sdk/lib/src/services/reversible_transfers_service.dart index b7562f38..d56a3193 100644 --- a/quantus_sdk/lib/src/services/reversible_transfers_service.dart +++ b/quantus_sdk/lib/src/services/reversible_transfers_service.dart @@ -3,13 +3,13 @@ import 'dart:typed_data'; import 'package:convert/convert.dart'; import 'package:polkadart/polkadart.dart'; -import 'package:quantus_sdk/generated/schrodinger/schrodinger.dart'; -import 'package:quantus_sdk/generated/schrodinger/types/pallet_reversible_transfers/high_security_account_data.dart'; -import 
'package:quantus_sdk/generated/schrodinger/types/pallet_reversible_transfers/pending_transfer.dart'; -import 'package:quantus_sdk/generated/schrodinger/types/primitive_types/h256.dart'; -import 'package:quantus_sdk/generated/schrodinger/types/qp_scheduler/block_number_or_timestamp.dart' as qp; -import 'package:quantus_sdk/generated/schrodinger/types/quantus_runtime/runtime_call.dart'; -import 'package:quantus_sdk/generated/schrodinger/types/sp_runtime/multiaddress/multi_address.dart' as multi_address; +import 'package:quantus_sdk/generated/planck/planck.dart'; +import 'package:quantus_sdk/generated/planck/types/pallet_reversible_transfers/high_security_account_data.dart'; +import 'package:quantus_sdk/generated/planck/types/pallet_reversible_transfers/pending_transfer.dart'; +import 'package:quantus_sdk/generated/planck/types/primitive_types/h256.dart'; +import 'package:quantus_sdk/generated/planck/types/qp_scheduler/block_number_or_timestamp.dart' as qp; +import 'package:quantus_sdk/generated/planck/types/quantus_runtime/runtime_call.dart'; +import 'package:quantus_sdk/generated/planck/types/sp_runtime/multiaddress/multi_address.dart' as multi_address; import 'package:quantus_sdk/src/extensions/address_extension.dart'; import 'package:quantus_sdk/src/extensions/duration_extension.dart'; import 'package:quantus_sdk/src/models/account.dart'; @@ -33,7 +33,7 @@ class ReversibleTransfersService { required BigInt amount, }) async { try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final multiDest = const multi_address.$MultiAddress().id(crypto.ss58ToAccountId(s: recipientAddress)); // Create the call @@ -79,7 +79,7 @@ class ReversibleTransfersService { BigInt amount, qp.BlockNumberOrTimestamp delay, ) { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final multiDest = const 
multi_address.$MultiAddress().id(crypto.ss58ToAccountId(s: recipientAddress)); final call = quantusApi.tx.reversibleTransfers.scheduleTransferWithDelay( @@ -111,7 +111,7 @@ class ReversibleTransfersService { /// Cancel a pending reversible transaction (theft deterrence - reverse a transaction) Future cancelReversibleTransfer({required Account account, required H256 transactionId}) async { try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); // Create the call final call = quantusApi.tx.reversibleTransfers.cancel(txId: transactionId); @@ -127,7 +127,7 @@ class ReversibleTransfersService { Future getHighSecurityConfig(String address) async { print('getHighSecurityConfig: $address'); try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final accountId = crypto.ss58ToAccountId(s: address); return await quantusApi.query.reversibleTransfers.highSecurityAccounts(accountId); @@ -139,7 +139,7 @@ class ReversibleTransfersService { /// Query pending transfer details Future getPendingTransfer(H256 transactionId) async { try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); return await quantusApi.query.reversibleTransfers.pendingTransfers(transactionId); } catch (e) { @@ -150,7 +150,7 @@ class ReversibleTransfersService { /// Get account's pending transaction index Future getAccountPendingIndex(String address) async { try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final accountId = crypto.ss58ToAccountId(s: address); return await quantusApi.query.reversibleTransfers.accountPendingIndex(accountId); @@ -194,7 +194,7 @@ class ReversibleTransfersService { /// Get constants related to reversible transfers Future> getConstants() async { try { - final quantusApi = 
Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final constants = quantusApi.constant.reversibleTransfers; return { @@ -225,7 +225,7 @@ class ReversibleTransfersService { : delay.toJson().toString(); print('setHighSecurity: ${account.accountId}, $guardianAccountId, $delayValue'); try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final guardianAccountId32 = crypto.ss58ToAccountId(s: guardianAccountId); // Create the call @@ -272,7 +272,7 @@ class ReversibleTransfersService { print('getInterceptedAccounts: $guardianAddress'); try { - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final accountId = crypto.ss58ToAccountId(s: guardianAddress); final interceptedAccounts = await quantusApi.query.reversibleTransfers.interceptorIndex(accountId); @@ -298,7 +298,7 @@ class ReversibleTransfersService { Duration safeguardDuration, ) async { final delay = safeguardDuration.qpTimestamp; - final quantusApi = Schrodinger(_substrateService.provider!); + final quantusApi = Planck(_substrateService.provider!); final call = quantusApi.tx.reversibleTransfers.setHighSecurity( delay: delay, interceptor: crypto.ss58ToAccountId(s: guardianAccountId), diff --git a/quantus_sdk/lib/src/services/substrate_service.dart b/quantus_sdk/lib/src/services/substrate_service.dart index 2899674b..591712aa 100644 --- a/quantus_sdk/lib/src/services/substrate_service.dart +++ b/quantus_sdk/lib/src/services/substrate_service.dart @@ -5,7 +5,8 @@ import 'package:bip39_mnemonic/bip39_mnemonic.dart'; import 'package:convert/convert.dart'; import 'package:flutter/foundation.dart'; import 'package:polkadart/polkadart.dart'; -import 'package:quantus_sdk/generated/schrodinger/schrodinger.dart'; +import 'package:polkadart/scale_codec.dart'; +import 'package:quantus_sdk/generated/planck/planck.dart'; import 
'package:quantus_sdk/quantus_sdk.dart'; import 'package:quantus_sdk/src/resonance_extrinsic_payload.dart'; import 'package:quantus_sdk/src/rust/api/crypto.dart' as crypto; @@ -72,7 +73,7 @@ class SubstrateService { final accountInfo = await _rpcEndpointService.rpcTask((uri) async { final provider = Provider.fromUri(uri); - final quantusApi = Schrodinger(provider); + final quantusApi = Planck(provider); return await quantusApi.query.system.account(accountID); }); @@ -84,6 +85,90 @@ class SubstrateService { } } + /// Query balance using raw RPC calls instead of generated metadata. + /// This is useful when the chain metadata doesn't match the generated code. + Future queryBalanceRaw(String address) async { + try { + final accountID = crypto.ss58ToAccountId(s: address); + + final result = await _rpcEndpointService.rpcTask((uri) async { + final provider = Provider.fromUri(uri); + + // Build the storage key for System::Account + // twox128("System") ++ twox128("Account") ++ blake2_128_concat(account_id) + const systemPrefix = '26aa394eea5630e07c48ae0c9558cef7'; + const accountPrefix = 'b99d880ec681799c0cf30e8886371da9'; + + // blake2_128_concat = blake2_128(data) ++ data + final accountIdHex = hex.encode(accountID); + final blake2Hash = _blake2b128Hex(accountID); + + final storageKey = '0x$systemPrefix$accountPrefix$blake2Hash$accountIdHex'; + + // Query storage + final response = await provider.send('state_getStorage', [storageKey]); + return response.result as String?; + }); + + if (result == null) { + // Account doesn't exist, balance is 0 + print('Account $address not found, returning 0'); + return BigInt.zero; + } + + // Decode the AccountInfo structure + // AccountInfo { nonce: u32, consumers: u32, providers: u32, sufficients: u32, data: AccountData } + // AccountData { free: u128, reserved: u128, frozen: u128, flags: u128 } + final balance = _decodeAccountBalance(result); + print('user balance (raw) $address: $balance'); + return balance; + } catch (e, st) { + 
print('Error querying balance (raw): $e, $st'); + throw Exception('Failed to query balance: $e'); + } + } + + /// Compute blake2b-128 hash and return as hex string + String _blake2b128Hex(Uint8List data) { + // Use the Blake2bHash from polkadart + final hasher = Hasher.blake2b128; + final hash = hasher.hash(data); + return hex.encode(hash); + } + + /// Decode AccountInfo hex to extract free balance + BigInt _decodeAccountBalance(String hexData) { + // Remove 0x prefix + String hexStr = hexData.startsWith('0x') ? hexData.substring(2) : hexData; + + // AccountInfo structure (SCALE encoded): + // - nonce: u32 (4 bytes, little-endian) + // - consumers: u32 (4 bytes) + // - providers: u32 (4 bytes) + // - sufficients: u32 (4 bytes) + // - data.free: u128 (16 bytes, little-endian) + // - data.reserved: u128 (16 bytes) + // - data.frozen: u128 (16 bytes) + // - data.flags: u128 (16 bytes) + + // Skip to free balance: offset = 4 + 4 + 4 + 4 = 16 bytes = 32 hex chars + if (hexStr.length < 64) { + throw Exception('AccountInfo hex too short: ${hexStr.length}'); + } + + // Extract free balance (16 bytes = 32 hex chars, little-endian) + final freeHex = hexStr.substring(32, 64); + + // Convert little-endian hex to BigInt + final bytes = hex.decode(freeHex); + BigInt value = BigInt.zero; + for (int i = bytes.length - 1; i >= 0; i--) { + value = (value << 8) + BigInt.from(bytes[i]); + } + + return value; + } + Uint8List _combineSignatureAndPubkey(List signature, List pubkey) { final result = Uint8List(signature.length + pubkey.length); result.setAll(0, signature); @@ -226,7 +311,7 @@ class SubstrateService { final registry = await _rpcEndpointService.rpcTask((uri) async { final provider = Provider.fromUri(uri); - return Schrodinger(provider).registry; + return Planck(provider).registry; }); final payload = payloadToSign.encode(registry); @@ -304,7 +389,7 @@ class SubstrateService { final registry = await _rpcEndpointService.rpcTask((uri) async { final provider = 
Provider.fromUri(uri); - return Schrodinger(provider).registry; + return Planck(provider).registry; }); return UnsignedTransactionData(payloadToSign: payloadToSign, signer: accountIdBytes, registry: registry); @@ -332,6 +417,25 @@ class SubstrateService { return await _submitExtrinsic(extrinsic); } + Future submitUnsignedExtrinsic(RuntimeCall call) async { + final registry = await _rpcEndpointService.rpcTask((uri) async { + final provider = Provider.fromUri(uri); + return Planck(provider).registry; + }); + final int versionByte = registry.extrinsicVersion & 127; + + final callData = call.encode(); // Uint8List + // 4. Encode as unsigned/bare extrinsic + // final encoder = ExtrinsicEncoder(chainInfo); + // final unsignedExtrinsic = encoder.encodeUnsigned(callData); // adds version byte (0x04 for V4, 0x05 for V5) + final output = ByteOutput() + ..pushByte(versionByte) + ..write(call.encode()); + final extrinsic = U8SequenceCodec.codec.encode(output.toBytes()); + return await _submitExtrinsic(extrinsic); + } + + Future _getNextAccountNonceFromAddress(String address) async { final nonceResult = await _rpcEndpointService.rpcTask((uri) async { final provider = Provider.fromUri(uri); diff --git a/quantus_sdk/lib/src/services/wormhole_address_manager.dart b/quantus_sdk/lib/src/services/wormhole_address_manager.dart new file mode 100644 index 00000000..3635948d --- /dev/null +++ b/quantus_sdk/lib/src/services/wormhole_address_manager.dart @@ -0,0 +1,310 @@ +import 'dart:convert'; +import 'dart:io'; + +import 'package:path_provider/path_provider.dart'; +import 'package:quantus_sdk/src/services/mnemonic_provider.dart'; +import 'package:quantus_sdk/src/services/wormhole_service.dart'; + +/// Purpose values for wormhole HD derivation. +class WormholeAddressPurpose { + /// Change addresses for wormhole withdrawals. + static const int change = 0; + + /// Miner rewards (primary address). + static const int minerRewards = 1; +} + +/// Information about a tracked wormhole address. 
+class TrackedWormholeAddress { + /// The wormhole address (SS58 format). + final String address; + + /// The HD derivation purpose. + final int purpose; + + /// The HD derivation index. + final int index; + + /// The secret for this address (hex encoded, needed for proofs). + final String secretHex; + + /// Whether this is the primary miner rewards address. + bool get isPrimary => + purpose == WormholeAddressPurpose.minerRewards && index == 0; + + const TrackedWormholeAddress({ + required this.address, + required this.purpose, + required this.index, + required this.secretHex, + }); + + Map toJson() => { + 'address': address, + 'purpose': purpose, + 'index': index, + 'secretHex': secretHex, + }; + + factory TrackedWormholeAddress.fromJson(Map json) { + return TrackedWormholeAddress( + address: json['address'] as String, + purpose: json['purpose'] as int, + index: json['index'] as int, + secretHex: json['secretHex'] as String, + ); + } + + @override + String toString() => + 'TrackedWormholeAddress($address, purpose=$purpose, index=$index)'; + + @override + bool operator ==(Object other) { + if (identical(this, other)) return true; + return other is TrackedWormholeAddress && other.address == address; + } + + @override + int get hashCode => address.hashCode; +} + +/// Manages multiple wormhole addresses for a wallet. +/// +/// This service tracks: +/// - The primary miner rewards address (purpose=1, index=0) +/// - Change addresses generated during partial withdrawals (purpose=0, index=N) +/// +/// All addresses are derived from the same mnemonic using HD derivation. 
+/// +/// ## Usage +/// +/// ```dart +/// // Create with a mnemonic provider +/// final manager = WormholeAddressManager( +/// mnemonicProvider: myMnemonicProvider, +/// ); +/// +/// // Initialize (loads from disk and ensures primary address exists) +/// await manager.initialize(); +/// +/// // Get all tracked addresses +/// final addresses = manager.allAddresses; +/// +/// // Derive a new change address for partial withdrawals +/// final changeAddr = await manager.deriveNextChangeAddress(); +/// ``` +class WormholeAddressManager { + static const String _storageFileName = 'wormhole_addresses.json'; + + final MnemonicProvider _mnemonicProvider; + final WormholeService _wormholeService; + + /// Optional custom storage directory. If null, uses app support directory. + final String? _customStorageDir; + + /// All tracked addresses, keyed by SS58 address. + final Map _addresses = {}; + + /// The next change address index to use. + int _nextChangeIndex = 0; + + /// Creates a new WormholeAddressManager. + /// + /// [mnemonicProvider] is required to derive addresses from the mnemonic. + /// [wormholeService] is optional and defaults to a new instance. + /// [storageDir] is optional for custom storage location (useful for tests). + WormholeAddressManager({ + required MnemonicProvider mnemonicProvider, + WormholeService? wormholeService, + String? storageDir, + }) : _mnemonicProvider = mnemonicProvider, + _wormholeService = wormholeService ?? WormholeService(), + _customStorageDir = storageDir; + + /// Get all tracked addresses. + List get allAddresses => _addresses.values.toList(); + + /// Get all address strings (SS58 format). + Set get allAddressStrings => _addresses.keys.toSet(); + + /// Get the primary miner rewards address. + TrackedWormholeAddress? get primaryAddress { + return _addresses.values.where((a) => a.isPrimary).firstOrNull; + } + + /// Get a tracked address by its SS58 string. + TrackedWormholeAddress? 
getAddress(String address) => _addresses[address]; + + /// Check if an address is tracked. + bool isTracked(String address) => _addresses.containsKey(address); + + /// Initialize the manager and load tracked addresses. + /// + /// This should be called on app startup after the wallet is set up. + Future initialize() async { + await _loadFromDisk(); + + // Ensure the primary address is tracked + final mnemonic = await _mnemonicProvider.getMnemonic(); + if (mnemonic != null) { + await _ensurePrimaryAddressTracked(mnemonic); + } + } + + /// Ensure the primary miner rewards address is tracked. + Future _ensurePrimaryAddressTracked(String mnemonic) async { + final keyPair = _wormholeService.deriveMinerRewardsKeyPair( + mnemonic: mnemonic, + index: 0, + ); + + if (!_addresses.containsKey(keyPair.address)) { + final tracked = TrackedWormholeAddress( + address: keyPair.address, + purpose: WormholeAddressPurpose.minerRewards, + index: 0, + secretHex: keyPair.secretHex, + ); + _addresses[keyPair.address] = tracked; + await _saveToDisk(); + } + } + + /// Derive and track a new change address. + /// + /// Returns the new address. The address is immediately persisted. + Future deriveNextChangeAddress() async { + final mnemonic = await _mnemonicProvider.getMnemonic(); + if (mnemonic == null) { + throw StateError('No mnemonic available - cannot derive change address'); + } + + final keyPair = _wormholeService.deriveKeyPair( + mnemonic: mnemonic, + purpose: WormholeAddressPurpose.change, + index: _nextChangeIndex, + ); + + final tracked = TrackedWormholeAddress( + address: keyPair.address, + purpose: WormholeAddressPurpose.change, + index: _nextChangeIndex, + secretHex: keyPair.secretHex, + ); + + _addresses[keyPair.address] = tracked; + _nextChangeIndex++; + await _saveToDisk(); + + return tracked; + } + + /// Re-derive all addresses from the mnemonic. + /// + /// This is useful after restoring from backup or when the secrets + /// need to be regenerated. 
+ Future rederiveAllSecrets() async { + final mnemonic = await _mnemonicProvider.getMnemonic(); + if (mnemonic == null) { + return; + } + + final updatedAddresses = {}; + + for (final tracked in _addresses.values) { + final keyPair = _wormholeService.deriveKeyPair( + mnemonic: mnemonic, + purpose: tracked.purpose, + index: tracked.index, + ); + + // Verify the derived address matches + if (keyPair.address != tracked.address) { + continue; + } + + updatedAddresses[keyPair.address] = TrackedWormholeAddress( + address: keyPair.address, + purpose: tracked.purpose, + index: tracked.index, + secretHex: keyPair.secretHex, + ); + } + + _addresses + ..clear() + ..addAll(updatedAddresses); + await _saveToDisk(); + } + + /// Load tracked addresses from disk. + Future _loadFromDisk() async { + try { + final file = await _getStorageFile(); + if (await file.exists()) { + final content = await file.readAsString(); + final data = jsonDecode(content) as Map; + + _addresses.clear(); + final addressesData = data['addresses'] as List?; + if (addressesData != null) { + for (final item in addressesData) { + final tracked = TrackedWormholeAddress.fromJson( + item as Map, + ); + _addresses[tracked.address] = tracked; + } + } + + _nextChangeIndex = data['nextChangeIndex'] as int? ?? 0; + } + } catch (e) { + // Silently fail - addresses will be re-derived if needed + } + } + + /// Save tracked addresses to disk. 
+ Future _saveToDisk() async { + try { + final file = await _getStorageFile(); + final data = { + 'nextChangeIndex': _nextChangeIndex, + 'addresses': _addresses.values.map((a) => a.toJson()).toList(), + }; + await file.writeAsString(jsonEncode(data)); + } catch (e) { + // Silently fail - not critical + } + } + + Future _getStorageFile() async { + final String basePath; + if (_customStorageDir != null) { + basePath = _customStorageDir; + } else { + final appDir = await getApplicationSupportDirectory(); + basePath = appDir.path; + } + + final quantusDir = Directory('$basePath/.quantus'); + if (!await quantusDir.exists()) { + await quantusDir.create(recursive: true); + } + return File('${quantusDir.path}/$_storageFileName'); + } + + /// Clear all tracked addresses (for reset/logout). + Future clearAll() async { + _addresses.clear(); + _nextChangeIndex = 0; + try { + final file = await _getStorageFile(); + if (await file.exists()) { + await file.delete(); + } + } catch (e) { + // Silently fail + } + } +} diff --git a/quantus_sdk/lib/src/services/wormhole_service.dart b/quantus_sdk/lib/src/services/wormhole_service.dart new file mode 100644 index 00000000..52e8e1ed --- /dev/null +++ b/quantus_sdk/lib/src/services/wormhole_service.dart @@ -0,0 +1,534 @@ +import 'package:quantus_sdk/src/rust/api/wormhole.dart' as wormhole; + +/// Purpose values for wormhole HD derivation. +class WormholePurpose { + /// Mobile app wormhole sends (future feature). + static const int mobileSends = 0; + + /// Miner rewards. + static const int minerRewards = 1; +} + +/// A wormhole key pair derived from a mnemonic. +class WormholeKeyPair { + /// The wormhole address as SS58 (the on-chain account that receives funds). + final String address; + + /// The raw address bytes (32 bytes, hex encoded with 0x prefix). + final String addressHex; + + /// The first hash / rewards preimage as SS58 (pass to node --rewards-preimage). 
+ final String rewardsPreimage; + + /// The first hash / rewards preimage bytes (32 bytes, hex encoded). + final String rewardsPreimageHex; + + /// The secret bytes (32 bytes, hex encoded) - SENSITIVE, needed for ZK proofs. + final String secretHex; + + const WormholeKeyPair({ + required this.address, + required this.addressHex, + required this.rewardsPreimage, + required this.rewardsPreimageHex, + required this.secretHex, + }); + + factory WormholeKeyPair.fromFfi(wormhole.WormholePairResult result) { + return WormholeKeyPair( + address: result.address, + addressHex: result.addressHex, + rewardsPreimage: result.firstHashSs58, + rewardsPreimageHex: result.firstHashHex, + secretHex: result.secretHex, + ); + } +} + +/// Service for wormhole address derivation and ZK proof generation. +/// +/// Wormhole addresses are special addresses where no private key exists. +/// Instead, funds are spent using zero-knowledge proofs. This is used for +/// miner rewards in the Quantus blockchain. +/// +/// ## Usage +/// +/// ```dart +/// final service = WormholeService(); +/// +/// // Derive a wormhole key pair for miner rewards +/// final keyPair = service.deriveMinerRewardsKeyPair(mnemonic: mnemonic, index: 0); +/// +/// // Use keyPair.rewardsPreimage for the node's --rewards-preimage flag +/// // Use keyPair.secretHex for generating withdrawal proofs +/// ``` +class WormholeService { + /// Derive a wormhole key pair from a mnemonic for miner rewards. + /// + /// This derives a wormhole address at the HD path: + /// `m/44'/189189189'/0'/1'/{index}'` + /// + /// The returned key pair contains: + /// - `address`: The on-chain wormhole address that will receive rewards + /// - `rewardsPreimage`: The value to pass to `--rewards-preimage` when starting the miner node + /// - `secretHex`: The secret needed for generating withdrawal proofs (keep secure!) 
+ WormholeKeyPair deriveMinerRewardsKeyPair({required String mnemonic, int index = 0}) { + final result = wormhole.deriveWormholePair(mnemonic: mnemonic, purpose: WormholePurpose.minerRewards, index: index); + return WormholeKeyPair.fromFfi(result); + } + + /// Derive a wormhole key pair from a mnemonic with custom purpose. + /// + /// This derives a wormhole address at the HD path: + /// `m/44'/189189189'/0'/{purpose}'/{index}'` + /// + /// Use [WormholePurpose.minerRewards] for miner reward addresses, or + /// [WormholePurpose.mobileSends] for mobile app wormhole sends (future). + WormholeKeyPair deriveKeyPair({required String mnemonic, required int purpose, int index = 0}) { + final result = wormhole.deriveWormholePair(mnemonic: mnemonic, purpose: purpose, index: index); + return WormholeKeyPair.fromFfi(result); + } + + /// Convert a rewards preimage (first_hash) to its corresponding wormhole address. + /// + /// This is useful for verifying that a given preimage produces the expected address. + String preimageToAddress(String preimageHex) { + return wormhole.firstHashToAddress(firstHashHex: preimageHex); + } + + /// Derive a wormhole address directly from a secret. + /// + /// This computes the on-chain address that corresponds to the given secret. + String deriveAddressFromSecret(String secretHex) { + return wormhole.deriveAddressFromSecret(secretHex: secretHex); + } + + /// Compute the nullifier for a UTXO. + /// + /// The nullifier is a deterministic hash of (secret, transferCount) that + /// prevents double-spending. Once revealed on-chain, the UTXO cannot be + /// spent again. + String computeNullifier({required String secretHex, required BigInt transferCount}) { + return wormhole.computeNullifier(secretHex: secretHex, transferCount: transferCount); + } + + /// Quantize an amount from planck (12 decimals) to circuit format (2 decimals). + /// + /// The ZK circuit uses quantized amounts for privacy. 
This function converts + /// a full-precision amount to the quantized format. + /// + /// Example: 1 QTN = 1,000,000,000,000 planck → 100 quantized + int quantizeAmount(BigInt amountPlanck) { + return wormhole.quantizeAmount(amountPlanck: amountPlanck); + } + + /// Dequantize an amount from circuit format (2 decimals) back to planck (12 decimals). + /// + /// Example: 100 quantized → 1,000,000,000,000 planck = 1 QTN + BigInt dequantizeAmount(int quantizedAmount) { + return wormhole.dequantizeAmount(quantizedAmount: quantizedAmount); + } + + /// Compute the output amount after fee deduction. + /// + /// The ZK circuit enforces that output amounts don't exceed input minus fee. + /// Use this function to compute the correct output amount for proof generation. + /// + /// Formula: `output = input * (10000 - fee_bps) / 10000` + /// + /// Example: `computeOutputAmount(38, 10)` = 37 (0.1% fee deducted) + int computeOutputAmount(int inputAmount, int feeBps) { + return wormhole.computeOutputAmount(inputAmount: inputAmount, feeBps: feeBps); + } + + /// Get the HD derivation path for a wormhole address. + String getDerivationPath({required int purpose, required int index}) { + return wormhole.getWormholeDerivationPath(purpose: purpose, index: index); + } + + /// Get the aggregation batch size from circuit config. + /// + /// This is the number of proofs that must be aggregated together before + /// submission to the chain. + BigInt getAggregationBatchSize(String circuitBinsDir) { + return wormhole.getAggregationBatchSize(binsDir: circuitBinsDir); + } + + /// Create a proof generator for generating withdrawal proofs. + /// + /// This loads ~171MB of circuit data, so it's expensive. The generator + /// should be created once and reused for all proof generations. + /// + /// [circuitBinsDir] should point to a directory containing `prover.bin` + /// and `common.bin`. 
+ Future createProofGenerator(String circuitBinsDir) async { + final generator = await wormhole.createProofGenerator(binsDir: circuitBinsDir); + return WormholeProofGenerator._(generator); + } + + /// Create a proof aggregator for aggregating multiple proofs. + /// + /// Individual proofs must be aggregated before on-chain submission. + /// + /// [circuitBinsDir] should point to a directory containing the aggregator + /// circuit files. + Future createProofAggregator(String circuitBinsDir) async { + final aggregator = await wormhole.createProofAggregator(binsDir: circuitBinsDir); + return WormholeProofAggregator._(aggregator); + } + + /// Generate circuit binary files for ZK proof generation. + /// + /// This is a **long-running operation** (10-30 minutes on most devices) that + /// generates the circuit binaries needed for wormhole withdrawal proofs. + /// + /// [outputDir] - Directory to write the binaries to + /// [numLeafProofs] - Number of leaf proofs per aggregation (typically 8) + /// + /// Returns a [CircuitGenerationResult] indicating success or failure. + /// + /// Generated files (~163MB total): + /// - `prover.bin` - Prover circuit data (largest file) + /// - `common.bin` - Common circuit data + /// - `verifier.bin` - Verifier circuit data + /// - `dummy_proof.bin` - Dummy proof for aggregation padding + /// - `aggregated_common.bin` - Aggregated circuit common data + /// - `aggregated_verifier.bin` - Aggregated circuit verifier data + /// - `config.json` - Configuration with hashes + Future generateCircuitBinaries({required String outputDir, int numLeafProofs = 8}) { + return wormhole.generateCircuitBinaries(outputDir: outputDir, numLeafProofs: numLeafProofs); + } + + /// Check if circuit binaries exist in a directory. + /// + /// Returns true if all required circuit files are present. 
+ bool checkCircuitBinariesExist(String binsDir) { + return wormhole.checkCircuitBinariesExist(binsDir: binsDir); + } + + /// Compute the full storage key for a wormhole TransferProof. + /// + /// This key can be used with `state_getReadProof` RPC to fetch the Merkle proof + /// needed for ZK proof generation. + /// + /// The storage key is: twox128("Wormhole") ++ twox128("TransferProof") ++ poseidon_hash(key) + /// + /// Parameters: + /// - [secretHex]: The wormhole secret (32 bytes, hex with 0x prefix) + /// - [transferCount]: The transfer count from NativeTransferred event + /// - [fundingAccount]: The account that sent the funds (SS58 format) + /// - [amount]: The exact transfer amount in planck + /// + /// Returns the full storage key as hex string with 0x prefix. + String computeTransferProofStorageKey({ + required String secretHex, + required BigInt transferCount, + required String fundingAccount, + required BigInt amount, + }) { + return wormhole.computeTransferProofStorageKey( + secretHex: secretHex, + transferCount: transferCount, + fundingAccount: fundingAccount, + amount: amount, + ); + } + + /// Encode digest logs from RPC format to SCALE-encoded bytes. + /// + /// The RPC returns digest logs as an array of hex-encoded SCALE bytes. + /// This function properly encodes them as a SCALE Vec which + /// matches what the circuit expects. + /// + /// Parameters: + /// - [logsHex]: Array of hex-encoded digest log items from RPC + /// (e.g., from `header.digest.logs` in the RPC response) + /// + /// Returns SCALE-encoded digest as hex string (with 0x prefix), + /// padded/truncated to 110 bytes as required by the circuit. 
+ /// + /// Example: + /// ```dart + /// // From RPC: header['digest']['logs'] = ['0x0642...', '0x0561...'] + /// final digestHex = service.encodeDigestFromRpcLogs( + /// logsHex: (header['digest']['logs'] as List).cast(), + /// ); + /// ``` + String encodeDigestFromRpcLogs({required List logsHex}) { + return wormhole.encodeDigestFromRpcLogs(logsHex: logsHex); + } + + /// Compute block hash from header components. + /// + /// This matches the Poseidon block hash computation used by the Quantus chain. + /// The hash is computed over the SCALE-encoded header components. + /// + /// Parameters: + /// - [parentHashHex]: Parent block hash (32 bytes, hex with 0x prefix) + /// - [stateRootHex]: State root (32 bytes, hex with 0x prefix) + /// - [extrinsicsRootHex]: Extrinsics root (32 bytes, hex with 0x prefix) + /// - [blockNumber]: Block number + /// - [digestHex]: SCALE-encoded digest (from [encodeDigestFromRpcLogs]) + /// + /// Returns block hash as hex string with 0x prefix. + String computeBlockHash({ + required String parentHashHex, + required String stateRootHex, + required String extrinsicsRootHex, + required int blockNumber, + required String digestHex, + }) { + return wormhole.computeBlockHash( + parentHashHex: parentHashHex, + stateRootHex: stateRootHex, + extrinsicsRootHex: extrinsicsRootHex, + blockNumber: blockNumber, + digestHex: digestHex, + ); + } +} + +/// A UTXO (unspent transaction output) from a wormhole address. +/// +/// This represents funds that have been transferred to a wormhole address +/// and can be withdrawn using a ZK proof. +class WormholeUtxo { + /// The wormhole secret (hex encoded with 0x prefix). + final String secretHex; + + /// Amount in planck (12 decimal places). + final BigInt amount; + + /// Transfer count from the NativeTransferred event. + final BigInt transferCount; + + /// The funding account (sender of the original transfer) - hex encoded. 
+ final String fundingAccountHex; + + /// Block hash where the transfer was recorded - hex encoded. + final String blockHashHex; + + const WormholeUtxo({ + required this.secretHex, + required this.amount, + required this.transferCount, + required this.fundingAccountHex, + required this.blockHashHex, + }); + + wormhole.WormholeUtxo toFfi() { + return wormhole.WormholeUtxo( + secretHex: secretHex, + amount: amount, + transferCount: transferCount, + fundingAccountHex: fundingAccountHex, + blockHashHex: blockHashHex, + ); + } +} + +/// Output assignment for a proof - where the withdrawn funds should go. +class ProofOutput { + /// Amount for the primary output (quantized to 2 decimal places). + final int amount; + + /// Exit account for the primary output (SS58 address). + final String exitAccount; + + /// Amount for the secondary output (change), 0 if unused. + final int changeAmount; + + /// Exit account for the change, empty if unused. + final String changeAccount; + + /// Create a single-output assignment (no change). + const ProofOutput.single({required this.amount, required this.exitAccount}) : changeAmount = 0, changeAccount = ''; + + /// Create a dual-output assignment (spend + change). + const ProofOutput.withChange({ + required this.amount, + required this.exitAccount, + required this.changeAmount, + required this.changeAccount, + }); + + wormhole.ProofOutputAssignment toFfi() { + return wormhole.ProofOutputAssignment( + outputAmount1: amount, + exitAccount1: exitAccount, + outputAmount2: changeAmount, + exitAccount2: changeAccount, + ); + } +} + +/// Block header data needed for proof generation. +class BlockHeader { + /// Parent block hash (hex encoded). + final String parentHashHex; + + /// State root of the block (hex encoded). + final String stateRootHex; + + /// Extrinsics root of the block (hex encoded). + final String extrinsicsRootHex; + + /// Block number. + final int blockNumber; + + /// Encoded digest (hex encoded). 
+ final String digestHex; + + const BlockHeader({ + required this.parentHashHex, + required this.stateRootHex, + required this.extrinsicsRootHex, + required this.blockNumber, + required this.digestHex, + }); + + wormhole.BlockHeaderData toFfi() { + return wormhole.BlockHeaderData( + parentHashHex: parentHashHex, + stateRootHex: stateRootHex, + extrinsicsRootHex: extrinsicsRootHex, + blockNumber: blockNumber, + digestHex: digestHex, + ); + } +} + +/// Storage proof data for verifying a transfer exists on-chain. +class StorageProof { + /// Raw proof nodes from the state trie (each node is hex encoded). + final List proofNodesHex; + + /// State root the proof is against (hex encoded). + final String stateRootHex; + + const StorageProof({required this.proofNodesHex, required this.stateRootHex}); + + wormhole.StorageProofData toFfi() { + return wormhole.StorageProofData(proofNodesHex: proofNodesHex, stateRootHex: stateRootHex); + } +} + +/// Result of generating a ZK proof. +class GeneratedProof { + /// The serialized proof bytes (hex encoded). + final String proofHex; + + /// The nullifier for this UTXO (hex encoded). + /// Once submitted on-chain, this UTXO cannot be spent again. + final String nullifierHex; + + const GeneratedProof({required this.proofHex, required this.nullifierHex}); + + factory GeneratedProof.fromFfi(wormhole.GeneratedProof result) { + return GeneratedProof(proofHex: result.proofHex, nullifierHex: result.nullifierHex); + } +} + +/// Result of aggregating multiple proofs. +class AggregatedProof { + /// The serialized aggregated proof bytes (hex encoded). + final String proofHex; + + /// Number of real proofs in the batch (rest are dummy proofs). 
+ final int numRealProofs; + + const AggregatedProof({required this.proofHex, required this.numRealProofs}); + + factory AggregatedProof.fromFfi(wormhole.AggregatedProof result) { + return AggregatedProof(proofHex: result.proofHex, numRealProofs: result.numRealProofs.toInt()); + } +} + +/// Generates ZK proofs for wormhole withdrawals. +/// +/// Creating a generator is expensive (loads ~171MB of circuit data), +/// so reuse the same instance for multiple proof generations. +class WormholeProofGenerator { + final wormhole.WormholeProofGenerator _inner; + + WormholeProofGenerator._(this._inner); + + /// Generate a ZK proof for withdrawing from a wormhole address. + /// + /// This proves that the caller knows the secret for the UTXO without + /// revealing it. + /// + /// Parameters: + /// - [utxo]: The UTXO to spend + /// - [output]: Where to send the funds + /// - [feeBps]: Fee in basis points (e.g., 100 = 1%) + /// - [blockHeader]: Block header data for the proof + /// - [storageProof]: Merkle proof that the UTXO exists + /// + /// Returns the generated proof and its nullifier. + Future generateProof({ + required WormholeUtxo utxo, + required ProofOutput output, + required int feeBps, + required BlockHeader blockHeader, + required StorageProof storageProof, + }) async { + final result = await _inner.generateProof( + utxo: utxo.toFfi(), + output: output.toFfi(), + feeBps: feeBps, + blockHeader: blockHeader.toFfi(), + storageProof: storageProof.toFfi(), + ); + return GeneratedProof.fromFfi(result); + } +} + +/// Aggregates multiple proofs into a single proof for on-chain submission. +/// +/// Individual proofs must be aggregated before submission to the chain. +/// If fewer proofs than the batch size are added, dummy proofs are used +/// to fill the remaining slots. +class WormholeProofAggregator { + final wormhole.WormholeProofAggregator _inner; + + WormholeProofAggregator._(this._inner); + + /// Get the batch size (number of proofs per aggregation). 
+ Future get batchSize async { + final size = await _inner.batchSize(); + return size.toInt(); + } + + /// Get the number of proofs currently in the buffer. + Future get proofCount async { + final count = await _inner.proofCount(); + return count.toInt(); + } + + /// Add a proof to the aggregation buffer. + Future addProof(String proofHex) async { + await _inner.addProof(proofHex: proofHex); + } + + /// Add a generated proof to the aggregation buffer. + Future addGeneratedProof(GeneratedProof proof) async { + await _inner.addProof(proofHex: proof.proofHex); + } + + /// Aggregate all proofs in the buffer. + /// + /// If fewer than [batchSize] proofs have been added, the remaining + /// slots are filled with dummy proofs automatically. + /// + /// Returns the aggregated proof ready for on-chain submission. + Future aggregate() async { + final result = await _inner.aggregate(); + return AggregatedProof.fromFfi(result); + } + + /// Clear the proof buffer without aggregating. + Future clear() async { + await _inner.clear(); + } +} diff --git a/quantus_sdk/lib/src/services/wormhole_utxo_service.dart b/quantus_sdk/lib/src/services/wormhole_utxo_service.dart new file mode 100644 index 00000000..3e754542 --- /dev/null +++ b/quantus_sdk/lib/src/services/wormhole_utxo_service.dart @@ -0,0 +1,317 @@ +import 'dart:convert'; + +import 'package:convert/convert.dart'; +import 'package:http/http.dart' as http; +import 'package:quantus_sdk/src/rust/api/crypto.dart' as crypto; +import 'package:quantus_sdk/src/services/network/redundant_endpoint.dart'; +import 'package:quantus_sdk/src/services/wormhole_service.dart'; + +/// A wormhole transfer that can be spent with a ZK proof. +/// +/// This represents a deposit to a wormhole address that has not yet been +/// spent (no nullifier revealed on-chain). +class WormholeTransfer { + /// Unique identifier for this transfer. + final String id; + + /// The wormhole address that received the funds. 
+ final String wormholeAddress; + + /// The account that sent the funds (funding account). + final String fromAddress; + + /// Amount in planck (12 decimal places). + final BigInt amount; + + /// Transfer count from the Wormhole pallet - required for ZK proof generation. + final BigInt transferCount; + + /// Block number where the transfer was recorded. + final int blockNumber; + + /// Block hash where the transfer was recorded. + final String blockHash; + + /// Timestamp of the transfer. + final DateTime timestamp; + + const WormholeTransfer({ + required this.id, + required this.wormholeAddress, + required this.fromAddress, + required this.amount, + required this.transferCount, + required this.blockNumber, + required this.blockHash, + required this.timestamp, + }); + + factory WormholeTransfer.fromJson(Map json) { + final block = json['block'] as Map?; + return WormholeTransfer( + id: json['id'] as String, + wormholeAddress: json['to']?['id'] as String? ?? '', + fromAddress: json['from']?['id'] as String? ?? '', + amount: BigInt.parse(json['amount'] as String), + transferCount: BigInt.parse(json['transferCount'] as String), + blockNumber: block?['height'] as int? ?? 0, + blockHash: block?['hash'] as String? ?? '', + timestamp: DateTime.parse(json['timestamp'] as String), + ); + } + + /// Convert to WormholeUtxo for proof generation. + /// + /// [secretHex] should be the secret derived from the mnemonic for this + /// wormhole address. + WormholeUtxo toUtxo(String secretHex) { + return WormholeUtxo( + secretHex: secretHex, + amount: amount, + transferCount: transferCount, + fundingAccountHex: _addressToHex(fromAddress), + blockHashHex: blockHash.startsWith('0x') ? 
blockHash : '0x$blockHash', + ); + } + + static String _addressToHex(String ss58Address) { + final bytes = crypto.ss58ToAccountId(s: ss58Address); + return '0x${hex.encode(bytes)}'; + } + + @override + String toString() { + return 'WormholeTransfer{id: $id, to: $wormholeAddress, from: $fromAddress, ' + 'amount: $amount, transferCount: $transferCount, block: $blockNumber}'; + } +} + +/// Service for querying wormhole UTXOs from Subsquid. +/// +/// This service queries the Subsquid indexer to find transfers to wormhole +/// addresses that have not been spent (no nullifier revealed on-chain). +class WormholeUtxoService { + final GraphQlEndpointService _graphQlEndpoint = GraphQlEndpointService(); + + /// GraphQL query to fetch wormhole transfers by recipient address. + /// + /// Only returns transfers with source=WORMHOLE that have a transferCount + /// (required for ZK proof generation). + static const String _transfersToWormholeQuery = r''' +query WormholeTransfers($wormholeAddress: String!, $limit: Int!, $offset: Int!) { + transfers( + limit: $limit + offset: $offset + where: { + to: { id_eq: $wormholeAddress } + source_eq: WORMHOLE + transferCount_isNull: false + } + orderBy: timestamp_DESC + ) { + id + from { id } + to { id } + amount + transferCount + timestamp + block { + height + hash + } + } +}'''; + + /// GraphQL query to check if nullifiers have been consumed. + static const String _nullifiersQuery = r''' +query CheckNullifiers($nullifiers: [String!]!) { + wormholeNullifiers( + where: { nullifier_in: $nullifiers } + ) { + nullifier + } +}'''; + + /// GraphQL query to fetch transfers by multiple wormhole addresses. + static const String _transfersToMultipleQuery = r''' +query WormholeTransfersMultiple($wormholeAddresses: [String!]!, $limit: Int!, $offset: Int!) 
{ + transfers( + limit: $limit + offset: $offset + where: { + to: { id_in: $wormholeAddresses } + source_eq: WORMHOLE + transferCount_isNull: false + } + orderBy: timestamp_DESC + ) { + id + from { id } + to { id } + amount + transferCount + timestamp + block { + height + hash + } + } +}'''; + + /// Fetch all wormhole transfers to an address. + /// + /// This returns all transfers that have been made to the wormhole address, + /// including those that may have already been spent. + /// + /// Use [getUnspentUtxos] to filter out spent transfers. + Future> getTransfersTo(String wormholeAddress, {int limit = 100, int offset = 0}) async { + final body = jsonEncode({ + 'query': _transfersToWormholeQuery, + 'variables': {'wormholeAddress': wormholeAddress, 'limit': limit, 'offset': offset}, + }); + + final http.Response response = await _graphQlEndpoint.post(body: body); + + if (response.statusCode != 200) { + throw Exception( + 'GraphQL wormhole transfers query failed: ${response.statusCode}. ' + 'Body: ${response.body}', + ); + } + + final responseBody = jsonDecode(response.body) as Map; + if (responseBody['errors'] != null) { + throw Exception('GraphQL errors: ${responseBody['errors']}'); + } + + final transfers = responseBody['data']?['transfers'] as List?; + if (transfers == null || transfers.isEmpty) { + return []; + } + + return transfers.map((t) => WormholeTransfer.fromJson(t as Map)).toList(); + } + + /// Fetch transfers to multiple wormhole addresses. 
+ Future> getTransfersToMultiple( + List wormholeAddresses, { + int limit = 100, + int offset = 0, + }) async { + if (wormholeAddresses.isEmpty) return []; + + final body = jsonEncode({ + 'query': _transfersToMultipleQuery, + 'variables': {'wormholeAddresses': wormholeAddresses, 'limit': limit, 'offset': offset}, + }); + + final http.Response response = await _graphQlEndpoint.post(body: body); + + if (response.statusCode != 200) { + throw Exception( + 'GraphQL wormhole transfers query failed: ${response.statusCode}. ' + 'Body: ${response.body}', + ); + } + + final responseBody = jsonDecode(response.body) as Map; + if (responseBody['errors'] != null) { + throw Exception('GraphQL errors: ${responseBody['errors']}'); + } + + final transfers = responseBody['data']?['transfers'] as List?; + if (transfers == null || transfers.isEmpty) { + return []; + } + + return transfers.map((t) => WormholeTransfer.fromJson(t as Map)).toList(); + } + + /// Check which nullifiers have been consumed on-chain. + /// + /// Returns a set of nullifier hex strings that have been spent. + Future> getConsumedNullifiers(List nullifiers) async { + if (nullifiers.isEmpty) return {}; + + final body = jsonEncode({ + 'query': _nullifiersQuery, + 'variables': {'nullifiers': nullifiers}, + }); + + final http.Response response = await _graphQlEndpoint.post(body: body); + + if (response.statusCode != 200) { + throw Exception( + 'GraphQL nullifiers query failed: ${response.statusCode}. ' + 'Body: ${response.body}', + ); + } + + final responseBody = jsonDecode(response.body) as Map; + if (responseBody['errors'] != null) { + throw Exception('GraphQL errors: ${responseBody['errors']}'); + } + + final consumed = responseBody['data']?['wormholeNullifiers'] as List?; + if (consumed == null || consumed.isEmpty) { + return {}; + } + + return consumed.map((n) => (n as Map)['nullifier'] as String).toSet(); + } + + /// Get unspent UTXOs for a wormhole address. 
+ /// + /// This fetches all transfers to the address and filters out those whose + /// nullifiers have already been consumed on-chain. + /// + /// [secretHex] is used to compute nullifiers for each transfer. + Future> getUnspentTransfers({ + required String wormholeAddress, + required String secretHex, + int limit = 100, + }) async { + // Fetch all transfers to this address + final transfers = await getTransfersTo(wormholeAddress, limit: limit); + if (transfers.isEmpty) return []; + + // Compute nullifiers for each transfer + final wormholeService = WormholeService(); + final nullifierToTransfer = {}; + + for (final transfer in transfers) { + final nullifier = wormholeService.computeNullifier(secretHex: secretHex, transferCount: transfer.transferCount); + nullifierToTransfer[nullifier] = transfer; + } + + // Check which nullifiers have been consumed + final consumedNullifiers = await getConsumedNullifiers(nullifierToTransfer.keys.toList()); + + // Return transfers whose nullifiers have NOT been consumed + return nullifierToTransfer.entries + .where((entry) => !consumedNullifiers.contains(entry.key)) + .map((entry) => entry.value) + .toList(); + } + + /// Get total unspent balance for a wormhole address. + Future getUnspentBalance({required String wormholeAddress, required String secretHex}) async { + final unspent = await getUnspentTransfers(wormholeAddress: wormholeAddress, secretHex: secretHex); + + return unspent.fold(BigInt.zero, (sum, t) => sum + t.amount); + } + + /// Get unspent UTXOs ready for proof generation. + /// + /// Returns [WormholeUtxo] objects that can be passed directly to + /// [WormholeProofGenerator.generateProof]. 
+ Future> getUnspentUtxos({ + required String wormholeAddress, + required String secretHex, + int limit = 100, + }) async { + final transfers = await getUnspentTransfers(wormholeAddress: wormholeAddress, secretHex: secretHex, limit: limit); + + return transfers.map((t) => t.toUtxo(secretHex)).toList(); + } +} diff --git a/quantus_sdk/lib/src/services/wormhole_withdrawal_service.dart b/quantus_sdk/lib/src/services/wormhole_withdrawal_service.dart new file mode 100644 index 00000000..ef21da54 --- /dev/null +++ b/quantus_sdk/lib/src/services/wormhole_withdrawal_service.dart @@ -0,0 +1,825 @@ +import 'dart:convert'; +import 'dart:typed_data'; + +import 'package:http/http.dart' as http; +import 'package:polkadart/polkadart.dart' show Hasher; +import 'package:polkadart/scale_codec.dart' as scale; +import 'package:quantus_sdk/generated/planck/types/frame_system/event_record.dart'; +import 'package:quantus_sdk/generated/planck/types/pallet_wormhole/pallet/call.dart' + as wormhole_call; +import 'package:quantus_sdk/generated/planck/types/pallet_wormhole/pallet/event.dart' + as wormhole_event; +import 'package:quantus_sdk/generated/planck/types/quantus_runtime/runtime_call.dart'; +import 'package:quantus_sdk/generated/planck/types/quantus_runtime/runtime_event.dart' + as runtime_event; +import 'package:quantus_sdk/generated/planck/types/sp_runtime/dispatch_error.dart' + as dispatch_error; +import 'package:quantus_sdk/generated/planck/types/frame_system/pallet/event.dart' + as system_event; +import 'package:quantus_sdk/src/services/substrate_service.dart'; +import 'package:quantus_sdk/src/services/wormhole_address_manager.dart'; +import 'package:quantus_sdk/src/services/wormhole_service.dart'; +import 'package:ss58/ss58.dart' as ss58; + +/// Progress callback for withdrawal operations. +typedef WithdrawalProgressCallback = + void Function(double progress, String message); + +/// Result of a withdrawal operation. +class WithdrawalResult { + final bool success; + final String? 
txHash; + final String? error; + final BigInt? exitAmount; + + /// If change was generated, this is the address where it was sent. + final String? changeAddress; + + /// The amount sent to the change address (in planck). + final BigInt? changeAmount; + + const WithdrawalResult({ + required this.success, + this.txHash, + this.error, + this.exitAmount, + this.changeAddress, + this.changeAmount, + }); +} + +/// Information about a transfer needed for proof generation. +class WormholeTransferInfo { + final String blockHash; + final BigInt transferCount; + final BigInt amount; + final String wormholeAddress; + final String fundingAccount; + + const WormholeTransferInfo({ + required this.blockHash, + required this.transferCount, + required this.amount, + required this.wormholeAddress, + required this.fundingAccount, + }); + + @override + String toString() => + 'WormholeTransferInfo(blockHash: $blockHash, transferCount: $transferCount, amount: $amount)'; +} + +/// Service for handling wormhole withdrawals. +/// +/// This orchestrates the entire withdrawal flow: +/// 1. Query chain for transfer count and transfer proofs +/// 2. For each transfer: fetch storage proof and generate ZK proof +/// 3. Aggregate proofs +/// 4. 
Submit transaction to chain +/// +/// ## Usage +/// +/// ```dart +/// final service = WormholeWithdrawalService(); +/// +/// final result = await service.withdraw( +/// rpcUrl: 'wss://rpc.quantus.network', +/// secretHex: '0x...', +/// wormholeAddress: 'qz...', +/// destinationAddress: 'qz...', +/// circuitBinsDir: '/path/to/circuits', +/// transfers: myTrackedTransfers, +/// onProgress: (progress, message) => print('$progress: $message'), +/// ); +/// +/// if (result.success) { +/// print('Withdrawal successful: ${result.txHash}'); +/// } +/// ``` +class WormholeWithdrawalService { + // Fee in basis points (10 = 0.1%) + static const int feeBps = 10; + + // Minimum output after quantization (3 units = 0.03 QTN) + static final BigInt minOutputPlanck = + BigInt.from(3) * BigInt.from(10).pow(10); + + // Native asset ID (0 for native token) + static const int nativeAssetId = 0; + + // Default batch size (number of proofs per aggregation) + static const int defaultBatchSize = 16; + + /// Withdraw funds from a wormhole address. + /// + /// [rpcUrl] - The RPC endpoint URL + /// [secretHex] - The wormhole secret for proof generation + /// [wormholeAddress] - The source wormhole address (SS58) + /// [destinationAddress] - Where to send the withdrawn funds (SS58) + /// [amount] - Amount to withdraw in planck (null = withdraw all) + /// [circuitBinsDir] - Directory containing circuit binary files + /// [transfers] - Pre-tracked transfers with exact amounts + /// [addressManager] - Optional address manager for deriving change addresses + /// [onProgress] - Progress callback for UI updates + Future withdraw({ + required String rpcUrl, + required String secretHex, + required String wormholeAddress, + required String destinationAddress, + BigInt? amount, + required String circuitBinsDir, + required List transfers, + WormholeAddressManager? addressManager, + WithdrawalProgressCallback? 
onProgress, + }) async { + try { + onProgress?.call(0.05, 'Preparing withdrawal...'); + + if (transfers.isEmpty) { + return const WithdrawalResult( + success: false, + error: 'No transfers provided for withdrawal', + ); + } + + // Calculate total available + final totalAvailable = transfers.fold( + BigInt.zero, + (sum, t) => sum + t.amount, + ); + + // Determine amount to withdraw + final withdrawAmount = amount ?? totalAvailable; + if (withdrawAmount > totalAvailable) { + return WithdrawalResult( + success: false, + error: + 'Insufficient balance. Available: $totalAvailable, requested: $withdrawAmount', + ); + } + + onProgress?.call(0.1, 'Selecting transfers...'); + + // Select transfers + final selectedTransfers = _selectTransfers(transfers, withdrawAmount); + final selectedTotal = selectedTransfers.fold( + BigInt.zero, + (sum, t) => sum + t.amount, + ); + + // Calculate output amounts after fee + final totalAfterFee = + selectedTotal - + (selectedTotal * BigInt.from(feeBps) ~/ BigInt.from(10000)); + + if (totalAfterFee < minOutputPlanck) { + return const WithdrawalResult( + success: false, + error: 'Amount too small after fee (minimum ~0.03 QTN)', + ); + } + + onProgress?.call(0.15, 'Loading circuit data...'); + + // Create proof generator + final wormholeService = WormholeService(); + final generator = await wormholeService.createProofGenerator( + circuitBinsDir, + ); + final aggregator = await wormholeService.createProofAggregator( + circuitBinsDir, + ); + + onProgress?.call(0.18, 'Fetching current block...'); + + // Get the current best block hash + final proofBlockHash = await _fetchBestBlockHash(rpcUrl); + + // Calculate if we need change + final requestedAmountQuantized = wormholeService.quantizeAmount( + withdrawAmount, + ); + + // Calculate max possible outputs for each transfer + final maxOutputsQuantized = selectedTransfers.map((t) { + final inputQuantized = wormholeService.quantizeAmount(t.amount); + return 
wormholeService.computeOutputAmount(inputQuantized, feeBps); + }).toList(); + final totalMaxOutputQuantized = maxOutputsQuantized.fold( + 0, + (a, b) => a + b, + ); + + // Determine if change is needed + final needsChange = requestedAmountQuantized < totalMaxOutputQuantized; + String? changeAddress; + TrackedWormholeAddress? changeAddressInfo; + + if (needsChange) { + if (addressManager == null) { + return const WithdrawalResult( + success: false, + error: + 'Partial withdrawal requires address manager for change address', + ); + } + + onProgress?.call(0.19, 'Deriving change address...'); + changeAddressInfo = await addressManager.deriveNextChangeAddress(); + changeAddress = changeAddressInfo.address; + } + + onProgress?.call(0.2, 'Generating proofs...'); + + // Generate proofs for each transfer + final proofs = []; + var remainingToSend = requestedAmountQuantized; + + for (int i = 0; i < selectedTransfers.length; i++) { + final transfer = selectedTransfers[i]; + final maxOutput = maxOutputsQuantized[i]; + final isLastTransfer = i == selectedTransfers.length - 1; + + final progress = 0.2 + (0.5 * (i / selectedTransfers.length)); + onProgress?.call( + progress, + 'Generating proof ${i + 1}/${selectedTransfers.length}...', + ); + + // Determine output and change amounts for this proof + int outputAmount; + int proofChangeAmount = 0; + + if (isLastTransfer && needsChange) { + outputAmount = remainingToSend; + proofChangeAmount = maxOutput - outputAmount; + if (proofChangeAmount < 0) proofChangeAmount = 0; + } else if (needsChange) { + outputAmount = remainingToSend < maxOutput + ? 
remainingToSend + : maxOutput; + } else { + outputAmount = maxOutput; + } + + remainingToSend -= outputAmount; + + try { + final proof = await _generateProofForTransfer( + generator: generator, + wormholeService: wormholeService, + transfer: transfer, + secretHex: secretHex, + destinationAddress: destinationAddress, + rpcUrl: rpcUrl, + proofBlockHash: proofBlockHash, + outputAmount: needsChange ? outputAmount : null, + changeAmount: proofChangeAmount, + changeAddress: changeAddress, + ); + proofs.add(proof); + } catch (e) { + return WithdrawalResult( + success: false, + error: 'Failed to generate proof: $e', + ); + } + } + + // Get the batch size from the aggregator + final batchSize = await aggregator.batchSize; + + // Split proofs into batches if needed + final numBatches = (proofs.length + batchSize - 1) ~/ batchSize; + + final txHashes = []; + + for (int batchIdx = 0; batchIdx < numBatches; batchIdx++) { + final batchStart = batchIdx * batchSize; + final batchEnd = (batchStart + batchSize).clamp(0, proofs.length); + final batchProofs = proofs.sublist(batchStart, batchEnd); + + final aggregateProgress = 0.7 + (0.1 * (batchIdx / numBatches)); + onProgress?.call( + aggregateProgress, + 'Aggregating batch ${batchIdx + 1}/$numBatches (${batchProofs.length} proofs)...', + ); + + // Clear aggregator and add proofs for this batch + await aggregator.clear(); + for (final proof in batchProofs) { + await aggregator.addGeneratedProof(proof); + } + final aggregatedProof = await aggregator.aggregate(); + + final submitProgress = 0.8 + (0.15 * (batchIdx / numBatches)); + onProgress?.call( + submitProgress, + 'Submitting batch ${batchIdx + 1}/$numBatches...', + ); + + // Submit this batch + final txHash = await _submitProof(proofHex: aggregatedProof.proofHex); + txHashes.add(txHash); + } + + onProgress?.call(0.95, 'Waiting for confirmations...'); + + // Wait for transaction confirmation + final lastTxHash = txHashes.last; + final confirmed = await 
_waitForTransactionConfirmation( + txHash: lastTxHash, + rpcUrl: rpcUrl, + destinationAddress: destinationAddress, + expectedAmount: totalAfterFee, + ); + + if (!confirmed) { + return WithdrawalResult( + success: false, + txHash: txHashes.join(', '), + error: + 'Transactions submitted but could not confirm success. Check txs: ${txHashes.join(', ')}', + ); + } + + onProgress?.call(1.0, 'Withdrawal complete!'); + + // Calculate change amount in planck if change was used + BigInt? changeAmountPlanck; + if (needsChange && changeAddress != null) { + final changeQuantized = + totalMaxOutputQuantized - requestedAmountQuantized; + changeAmountPlanck = wormholeService.dequantizeAmount(changeQuantized); + } + + return WithdrawalResult( + success: true, + txHash: txHashes.join(', '), + exitAmount: totalAfterFee, + changeAddress: changeAddress, + changeAmount: changeAmountPlanck, + ); + } catch (e) { + return WithdrawalResult(success: false, error: e.toString()); + } + } + + /// Select transfers to cover the target amount. + List _selectTransfers( + List available, + BigInt targetAmount, + ) { + // Sort by amount descending (largest first) + final sorted = List.from(available) + ..sort((a, b) => b.amount.compareTo(a.amount)); + + final selected = []; + var total = BigInt.zero; + + for (final transfer in sorted) { + if (total >= targetAmount) break; + selected.add(transfer); + total += transfer.amount; + } + + return selected; + } + + /// Generate a ZK proof for a single transfer. + Future _generateProofForTransfer({ + required WormholeProofGenerator generator, + required WormholeService wormholeService, + required WormholeTransferInfo transfer, + required String secretHex, + required String destinationAddress, + required String rpcUrl, + required String proofBlockHash, + int? outputAmount, + int changeAmount = 0, + String? changeAddress, + }) async { + final blockHash = proofBlockHash.startsWith('0x') + ? 
proofBlockHash + : '0x$proofBlockHash'; + + // Get block header for the proof block + final blockHeader = await _fetchBlockHeader(rpcUrl, blockHash); + + // Get storage proof for this transfer at the proof block + final storageProof = await _fetchStorageProof( + rpcUrl: rpcUrl, + blockHash: blockHash, + transfer: transfer, + secretHex: secretHex, + wormholeService: wormholeService, + ); + + // Quantize the amount for the circuit + final quantizedInputAmount = wormholeService.quantizeAmount( + transfer.amount, + ); + + // Compute the max output amount after fee deduction + final maxOutputAmount = wormholeService.computeOutputAmount( + quantizedInputAmount, + feeBps, + ); + + // Use provided output amount or default to max + final quantizedOutputAmount = outputAmount ?? maxOutputAmount; + + // Validate that output + change doesn't exceed max + if (quantizedOutputAmount + changeAmount > maxOutputAmount) { + throw ArgumentError( + 'Output ($quantizedOutputAmount) + change ($changeAmount) exceeds max allowed ($maxOutputAmount)', + ); + } + + // Create the UTXO + final fundingAccountHex = _ss58ToHex(transfer.fundingAccount); + final utxo = WormholeUtxo( + secretHex: secretHex, + amount: transfer.amount, + transferCount: transfer.transferCount, + fundingAccountHex: fundingAccountHex, + blockHashHex: blockHash, + ); + + // Create output assignment + final ProofOutput output; + if (changeAmount > 0 && changeAddress != null) { + output = ProofOutput.withChange( + amount: quantizedOutputAmount, + exitAccount: destinationAddress, + changeAmount: changeAmount, + changeAccount: changeAddress, + ); + } else { + output = ProofOutput.single( + amount: quantizedOutputAmount, + exitAccount: destinationAddress, + ); + } + + // Generate the proof + return await generator.generateProof( + utxo: utxo, + output: output, + feeBps: feeBps, + blockHeader: blockHeader, + storageProof: storageProof, + ); + } + + /// Fetch the current best block hash from the chain. 
+ Future _fetchBestBlockHash(String rpcUrl) async { + final response = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'chain_getBlockHash', + 'params': [], + }), + ); + + if (response.statusCode != 200) { + throw Exception( + 'Failed to fetch best block hash: ${response.statusCode}', + ); + } + + final result = jsonDecode(response.body); + if (result['error'] != null) { + throw Exception('RPC error fetching best block hash: ${result['error']}'); + } + + final blockHash = result['result'] as String?; + if (blockHash == null) { + throw Exception('No best block hash returned from chain'); + } + + return blockHash; + } + + /// Fetch block header from RPC. + Future _fetchBlockHeader(String rpcUrl, String blockHash) async { + final response = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'chain_getHeader', + 'params': [blockHash], + }), + ); + + if (response.statusCode != 200) { + throw Exception('Failed to fetch block header: ${response.statusCode}'); + } + + final result = jsonDecode(response.body); + if (result['error'] != null) { + throw Exception( + 'RPC error fetching header for $blockHash: ${result['error']}', + ); + } + + final header = result['result']; + if (header == null) { + throw Exception( + 'Block not found: $blockHash - the block may have been pruned or the chain was reset', + ); + } + + // Use SDK to properly encode digest from RPC logs + final digestLogs = (header['digest']['logs'] as List? ?? 
[]) + .cast() + .toList(); + final wormholeService = WormholeService(); + final digestHex = wormholeService.encodeDigestFromRpcLogs( + logsHex: digestLogs, + ); + + return BlockHeader( + parentHashHex: header['parentHash'] as String, + stateRootHex: header['stateRoot'] as String, + extrinsicsRootHex: header['extrinsicsRoot'] as String, + blockNumber: int.parse( + (header['number'] as String).substring(2), + radix: 16, + ), + digestHex: digestHex, + ); + } + + /// Fetch storage proof for a transfer. + Future _fetchStorageProof({ + required String rpcUrl, + required String blockHash, + required WormholeTransferInfo transfer, + required String secretHex, + required WormholeService wormholeService, + }) async { + // Compute the storage key using Poseidon hash + final storageKey = wormholeService.computeTransferProofStorageKey( + secretHex: secretHex, + transferCount: transfer.transferCount, + fundingAccount: transfer.fundingAccount, + amount: transfer.amount, + ); + + // Fetch the read proof from chain + final response = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'state_getReadProof', + 'params': [ + [storageKey], + blockHash, + ], + }), + ); + + if (response.statusCode != 200) { + throw Exception('Failed to fetch storage proof: ${response.statusCode}'); + } + + final result = jsonDecode(response.body); + if (result['error'] != null) { + throw Exception('RPC error: ${result['error']}'); + } + + final proof = result['result']; + final proofNodes = (proof['proof'] as List) + .map((p) => p as String) + .toList(); + + if (proofNodes.isEmpty) { + throw Exception( + 'Empty storage proof - transfer may not exist at this block', + ); + } + + // Get state root from block header + final headerResponse = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'chain_getHeader', + 
'params': [blockHash], + }), + ); + + final headerResult = jsonDecode(headerResponse.body); + if (headerResult['error'] != null) { + throw Exception('Failed to get block header: ${headerResult['error']}'); + } + + final stateRoot = headerResult['result']['stateRoot'] as String; + + return StorageProof(proofNodesHex: proofNodes, stateRootHex: stateRoot); + } + + /// Submit aggregated proof to chain as an unsigned extrinsic. + Future _submitProof({required String proofHex}) async { + final proofBytes = _hexToBytes( + proofHex.startsWith('0x') ? proofHex.substring(2) : proofHex, + ); + + final call = RuntimeCall.values.wormhole( + wormhole_call.VerifyAggregatedProof(proofBytes: proofBytes), + ); + + final txHash = await SubstrateService().submitUnsignedExtrinsic(call); + final txHashHex = '0x${_bytesToHex(txHash)}'; + return txHashHex; + } + + /// Wait for a transaction to be confirmed. + Future _waitForTransactionConfirmation({ + required String txHash, + required String rpcUrl, + required String destinationAddress, + required BigInt expectedAmount, + int maxAttempts = 30, + Duration pollInterval = const Duration(seconds: 2), + }) async { + String? 
lastBlockHash; + + for (var attempt = 0; attempt < maxAttempts; attempt++) { + await Future.delayed(pollInterval); + + try { + // Get block hash + final hashResponse = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'chain_getBlockHash', + 'params': [], + }), + ); + final hashResult = jsonDecode(hashResponse.body); + final currentBlockHash = hashResult['result'] as String?; + + if (currentBlockHash == null || currentBlockHash == lastBlockHash) { + continue; + } + + lastBlockHash = currentBlockHash; + + // Check events in this block for wormhole activity + final eventsKey = '0x${_twox128('System')}${_twox128('Events')}'; + final eventsResponse = await http.post( + Uri.parse(rpcUrl), + headers: {'Content-Type': 'application/json'}, + body: jsonEncode({ + 'jsonrpc': '2.0', + 'id': 1, + 'method': 'state_getStorage', + 'params': [eventsKey, currentBlockHash], + }), + ); + final eventsResult = jsonDecode(eventsResponse.body); + final eventsHex = eventsResult['result'] as String?; + + if (eventsHex == null) { + continue; + } + + // Look for wormhole events in this block + final wormholeResult = _checkForWormholeEvents(eventsHex); + + if (wormholeResult != null) { + return wormholeResult['success'] == true; + } + } catch (e) { + // Continue trying + } + } + + return false; + } + + /// Wormhole error names (order from pallet Error enum) + static const _wormholeErrors = [ + 'InvalidProof', + 'ProofDeserializationFailed', + 'VerificationFailed', + 'InvalidPublicInputs', + 'NullifierAlreadyUsed', + 'VerifierNotAvailable', + 'InvalidStorageRoot', + 'StorageRootMismatch', + 'BlockNotFound', + 'InvalidBlockNumber', + 'AggregatedVerifierNotAvailable', + 'AggregatedProofDeserializationFailed', + 'AggregatedVerificationFailed', + 'InvalidAggregatedPublicInputs', + 'InvalidVolumeFeeRate', + 'TransferAmountBelowMinimum', + ]; + + /// Check events hex for wormhole withdrawal 
verification activity. + Map? _checkForWormholeEvents(String eventsHex) { + final bytes = _hexToBytes( + eventsHex.startsWith('0x') ? eventsHex.substring(2) : eventsHex, + ); + final input = scale.ByteInput(Uint8List.fromList(bytes)); + bool? success; + String? error; + + try { + final numEvents = scale.CompactCodec.codec.decode(input); + + for (var i = 0; i < numEvents; i++) { + try { + final eventRecord = EventRecord.decode(input); + final event = eventRecord.event; + + // Check for Wormhole.ProofVerified + if (event is runtime_event.Wormhole) { + final wormholeEvent = event.value0; + if (wormholeEvent is wormhole_event.ProofVerified) { + success = true; + } + } + + // Check for System.ExtrinsicFailed + if (event is runtime_event.System) { + final systemEvent = event.value0; + if (systemEvent is system_event.ExtrinsicFailed) { + if (i > 0) { + success = false; + error = _formatDispatchError(systemEvent.dispatchError); + } + } + } + } catch (e) { + break; + } + } + } catch (e) { + // Ignore decode errors + } + + if (success == null) return null; + + return {'success': success, 'error': error}; + } + + /// Format a DispatchError into a human-readable string. + String _formatDispatchError(dispatch_error.DispatchError err) { + if (err is dispatch_error.Module) { + final moduleError = err.value0; + final palletIndex = moduleError.index; + final errorIndex = moduleError.error.isNotEmpty + ? 
moduleError.error[0] + : 0; + + if (palletIndex == 20 && errorIndex < _wormholeErrors.length) { + return 'Wormhole.${_wormholeErrors[errorIndex]}'; + } + return 'Module(pallet=$palletIndex, error=$errorIndex)'; + } + return err.toJson().toString(); + } + + // Helper functions + + String _twox128(String input) { + final bytes = Uint8List.fromList(utf8.encode(input)); + final hash = Hasher.twoxx128.hash(bytes); + return _bytesToHex(hash); + } + + String _ss58ToHex(String ss58Address) { + final decoded = ss58.Address.decode(ss58Address); + return '0x${decoded.pubkey.map((b) => b.toRadixString(16).padLeft(2, '0')).join()}'; + } + + Uint8List _hexToBytes(String hex) { + final str = hex.startsWith('0x') ? hex.substring(2) : hex; + final result = Uint8List(str.length ~/ 2); + for (var i = 0; i < result.length; i++) { + result[i] = int.parse(str.substring(i * 2, i * 2 + 2), radix: 16); + } + return result; + } + + String _bytesToHex(List bytes) { + return bytes.map((b) => b.toRadixString(16).padLeft(2, '0')).join(); + } +} diff --git a/quantus_sdk/pubspec.lock b/quantus_sdk/pubspec.lock index 96515076..5ca9f337 100644 --- a/quantus_sdk/pubspec.lock +++ b/quantus_sdk/pubspec.lock @@ -484,10 +484,10 @@ packages: dependency: transitive description: name: meta - sha256: "23f08335362185a5ea2ad3a4e597f1375e78bce8a040df5c600c8d3552ef2394" + sha256: e3641ec5d63ebf0d9b41bd43201a66e3fc79a65db5f61fc181f04cd27aab950c url: "https://pub.dev" source: hosted - version: "1.17.0" + version: "1.16.0" nm: dependency: transitive description: @@ -513,7 +513,7 @@ packages: source: hosted version: "1.9.1" path_provider: - dependency: transitive + dependency: "direct main" description: name: path_provider sha256: "50c5dd5b6e1aaf6fb3a78b33f6aa3afca52bf903a8a5298f53101fdaee55bbcd" @@ -848,10 +848,10 @@ packages: dependency: transitive description: name: test_api - sha256: ab2726c1a94d3176a45960b6234466ec367179b87dd74f1611adb1f3b5fb9d55 + sha256: 
"522f00f556e73044315fa4585ec3270f1808a4b186c936e612cab0b565ff1e00" url: "https://pub.dev" source: hosted - version: "0.7.7" + version: "0.7.6" typed_data: dependency: transitive description: diff --git a/quantus_sdk/pubspec.yaml b/quantus_sdk/pubspec.yaml index be48afae..5742b439 100644 --- a/quantus_sdk/pubspec.yaml +++ b/quantus_sdk/pubspec.yaml @@ -27,6 +27,7 @@ dependencies: convert: # Version managed by melos.yaml # Storage, networking, and utilities + path_provider: # Version managed by melos.yaml shared_preferences: # Version managed by melos.yaml flutter_secure_storage: # Version managed by melos.yaml http: # Version managed by melos.yaml @@ -52,7 +53,8 @@ dependencies: polkadart: output_dir: lib/generated # Optional. Sets the directory of generated files. Provided value should be a valid path on your system. Default: lib/generated chains: # Dictionary of chains and endpoints - schrodinger: wss://a1-dirac.quantus.cat + # schrodinger: wss://a1-dirac.quantus.cat + planck: ws://127.0.0.1:9944 dev_dependencies: flutter_test: @@ -60,3 +62,7 @@ dev_dependencies: flutter_lints: # Version managed by melos.yaml integration_test: sdk: flutter + +flutter: + assets: + - assets/circuits/ diff --git a/quantus_sdk/rust/Cargo.lock b/quantus_sdk/rust/Cargo.lock index 55c17ea6..4310acdc 100644 --- a/quantus_sdk/rust/Cargo.lock +++ b/quantus_sdk/rust/Cargo.lock @@ -43,17 +43,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "aes" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" -dependencies = [ - "cfg-if", - "cipher", - "cpufeatures", -] - [[package]] name = "ahash" version = "0.8.12" @@ -61,6 +50,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" dependencies = [ "cfg-if", + "const-random", "once_cell", "version_check", "zerocopy", @@ -109,6 +99,56 @@ 
dependencies = [ "log", ] +[[package]] +name = "anstream" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" + +[[package]] +name = "anstyle-parse" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.61.2", +] + [[package]] name = "anyhow" version = "1.0.98" @@ -325,9 +365,9 @@ dependencies = [ [[package]] name = "bip39" -version = "2.2.0" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43d193de1f7487df1914d3a568b772458861d33f9c54249612cc2893d6915054" +checksum = "90dbd31c98227229239363921e60fcf5e558e43ec69094d46fc4996f08d1d5bc" dependencies = [ "bitcoin_hashes 0.13.0", ] @@ -499,15 +539,51 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" [[package]] -name = "cipher" -version = "0.4.4" +name = "clap" +version 
= "4.5.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +checksum = "2797f34da339ce31042b27d23607e051786132987f595b02ba4f6a6dffb7030a" dependencies = [ - "crypto-common", - "inout", + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24a241312cea5059b13574bb9b3861cabf758b879c15190b37b6d6fd63ab6876" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.104", ] +[[package]] +name = "clap_lex" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831" + +[[package]] +name = "colorchoice" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + [[package]] name = "common-path" version = "1.0.0" @@ -530,6 +606,26 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" +[[package]] +name = "const-random" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom 
0.2.17", + "once_cell", + "tiny-keccak", +] + [[package]] name = "const_format" version = "0.2.34" @@ -598,6 +694,31 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + [[package]] name = "crunchy" version = "0.2.4" @@ -930,7 +1051,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -980,6 +1101,15 @@ dependencies = [ "winapi", ] +[[package]] +name = "fixed-hash" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +dependencies = [ + "static_assertions", +] + [[package]] name = "fixed-hash" version = "0.8.0" @@ -1164,13 +1294,15 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi", + "wasm-bindgen", ] [[package]] @@ -1244,6 +1376,8 @@ checksum = 
"e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", + "rayon", + "serde", ] [[package]] @@ -1263,6 +1397,12 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "hermit-abi" version = "0.5.2" @@ -1322,7 +1462,7 @@ checksum = "803d15461ab0dcc56706adf266158acbc44ccf719bf7d0af30705f58b90a4b8c" dependencies = [ "integer-sqrt", "num-traits", - "uint", + "uint 0.10.0", ] [[package]] @@ -1355,15 +1495,6 @@ dependencies = [ "hashbrown 0.15.4", ] -[[package]] -name = "inout" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" -dependencies = [ - "generic-array", -] - [[package]] name = "integer-sqrt" version = "0.1.5" @@ -1384,6 +1515,12 @@ dependencies = [ "libc", ] +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + [[package]] name = "itertools" version = "0.10.5" @@ -1451,6 +1588,16 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "keccak-hash" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce2bd4c29270e724d3eaadf7bdc8700af4221fc0ed771b855eadcd1b98d52851" +dependencies = [ + "primitive-types 0.10.1", + "tiny-keccak", +] + [[package]] name = "lazy_static" version = "1.5.0" @@ -1681,6 +1828,20 @@ dependencies = [ "winapi", ] +[[package]] +name = "num" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" +dependencies = [ + "num-bigint", + "num-complex", + "num-integer", + "num-iter", + "num-rational", + "num-traits", +] + [[package]] name = "num-bigint" version = "0.4.6" @@ -1689,6 +1850,17 @@ checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", + "rand 0.8.5", +] + +[[package]] +name = "num-complex" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" +dependencies = [ + "num-traits", + "rand 0.8.5", ] [[package]] @@ -1716,6 +1888,28 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.19" @@ -1750,6 +1944,12 @@ version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + [[package]] name = "opaque-debug" version = "0.3.1" @@ -2055,6 +2255,21 @@ dependencies = [ "spki", ] +[[package]] +name = "plonky2_maybe_rayon" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9e1e554181dc95243b8d9948ae7bae5759c7fb2502fed28f671f95ef38079406" +dependencies = [ + "rayon", +] + +[[package]] +name = "plonky2_util" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c32c137808ca984ab2458b612b7eb0462d853ee041a3136e83d54b96074c7610" + [[package]] name = "polkavm-common" version = "0.18.0" @@ -2170,18 +2385,28 @@ dependencies = [ "syn 2.0.104", ] +[[package]] +name = "primitive-types" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05e4722c697a58a99d5d06a08c30821d7c082a4632198de1eaa5a6c22ef42373" +dependencies = [ + "fixed-hash 0.7.0", + "uint 0.9.5", +] + [[package]] name = "primitive-types" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d15600a7d856470b7d278b3fe0e311fe28c2526348549f8ef2ff7db3299c87f5" dependencies = [ - "fixed-hash", + "fixed-hash 0.8.0", "impl-codec", "impl-num-traits", "impl-serde", "scale-info", - "uint", + "uint 0.10.0", ] [[package]] @@ -2219,7 +2444,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ "bytes", - "heck", + "heck 0.4.1", "itertools 0.10.5", "lazy_static", "log", @@ -2256,11 +2481,112 @@ dependencies = [ "prost", ] +[[package]] +name = "qp-plonky2" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "593bccf15b8e2f9eb904ef4010f68b81ddcceb70aaf90116ce29ec09d7578dd4" +dependencies = [ + "ahash", + "anyhow", + "getrandom 0.2.17", + "hashbrown 0.14.5", + "itertools 0.11.0", + "keccak-hash", + "log", + "num", + "p3-field", + "p3-goldilocks", + "p3-poseidon2", + "p3-symmetric", + "plonky2_maybe_rayon", + "plonky2_util", + "qp-plonky2-core", + "qp-plonky2-field", + "qp-plonky2-verifier", + "qp-poseidon-constants", + "rand 0.8.5", + "rand_chacha 0.3.1", + "serde", + "static_assertions", + "unroll", + "web-time", +] + 
+[[package]] +name = "qp-plonky2-core" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7d30fabfd90e359640f2371c8b3e9b377d215f7dcf4e61da1f38776c5b84540" +dependencies = [ + "ahash", + "anyhow", + "hashbrown 0.14.5", + "itertools 0.11.0", + "keccak-hash", + "log", + "num", + "p3-field", + "p3-goldilocks", + "p3-poseidon2", + "p3-symmetric", + "plonky2_util", + "qp-plonky2-field", + "qp-poseidon-constants", + "rand 0.8.5", + "serde", + "static_assertions", + "unroll", +] + +[[package]] +name = "qp-plonky2-field" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20c9f8259bf4f220b1d81001458cc6c09a1372f2b3e8dac2fb489a66230385c3" +dependencies = [ + "anyhow", + "itertools 0.11.0", + "num", + "plonky2_util", + "rand 0.8.5", + "rustc_version", + "serde", + "static_assertions", + "unroll", +] + +[[package]] +name = "qp-plonky2-verifier" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd0eb89fd3cc40c4b25be95399635957d416406328169ba939db989c0444f364" +dependencies = [ + "ahash", + "anyhow", + "hashbrown 0.14.5", + "itertools 0.11.0", + "keccak-hash", + "log", + "num", + "p3-field", + "p3-goldilocks", + "p3-poseidon2", + "p3-symmetric", + "plonky2_util", + "qp-plonky2-core", + "qp-plonky2-field", + "qp-poseidon-constants", + "serde", + "static_assertions", + "unroll", +] + [[package]] name = "qp-poseidon" -version = "1.0.1" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0353086f7af1df7d45a1ecb995cf84b583c8211d7122f542044b37388b5effcd" +checksum = "4214ec389bff0c21c6ef815cf0ff00656586344dbe20f6441d23a1a6a7f56e84" dependencies = [ "log", "p3-field", @@ -2289,47 +2615,132 @@ dependencies = [ [[package]] name = "qp-poseidon-core" -version = "1.0.1" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e658a373a7fb22babeda9ffcc8af0a894e6e3c008272ed735509eccb7769ead3" +checksum = "0f65766d6de64eff741c7f402002a3322f5e563d53e0e9040aeab4921ff24f2b" dependencies = [ "p3-field", "p3-goldilocks", "p3-poseidon2", "p3-symmetric", + "qp-plonky2", "qp-poseidon-constants", "rand_chacha 0.9.0", ] [[package]] name = "qp-rusty-crystals-dilithium" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e77a42bfb3430fa3bf3f5a148f8132de301876ceb1bdf0891909c2728f044a58" +checksum = "d734438e080d69fa186dac23565dd261fa8146048af00ba8aea7467b429c104b" dependencies = [ - "aes", - "cipher", - "sha2 0.10.9", - "subtle", + "zeroize", ] [[package]] name = "qp-rusty-crystals-hdwallet" -version = "1.0.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48fa242963fcd6bc970948b6904f18074673ff89cab8ed0133846a53c69deca7" +checksum = "74d9d8eb6c6a555c831496ab14348a41e4d23aa11943930a568891bf687cd8b1" dependencies = [ "bip39", + "bs58", + "getrandom 0.2.17", "hex", "hex-literal", - "nam-tiny-hderive", + "hmac", + "k256", "qp-poseidon-core", "qp-rusty-crystals-dilithium", - "rand_chacha 0.9.0", - "rand_core 0.9.3", "serde", "serde_json", - "thiserror 2.0.16", + "sha2 0.10.9", + "thiserror 2.0.18", + "zeroize", +] + +[[package]] +name = "qp-wormhole-aggregator" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bad3d3f37af4748e635f9197b2145cf4d218b97ad361e6b696724e3ddbb4e12a" +dependencies = [ + "anyhow", + "hex", + "qp-plonky2", + "qp-wormhole-circuit", + "qp-wormhole-inputs", + "qp-wormhole-prover", + "qp-zk-circuits-common", + "rand 0.8.5", + "rayon", + "serde", + "serde_json", + "sha2 0.10.9", +] + +[[package]] +name = "qp-wormhole-circuit" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7cdfba4fd293063a3e9eb964e2afb58673e9a7fd6d4edb0484783e0ed600927" +dependencies = [ + "anyhow", + "hex", + 
"qp-plonky2", + "qp-wormhole-inputs", + "qp-zk-circuits-common", +] + +[[package]] +name = "qp-wormhole-circuit-builder" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fde9752820d730fbb6979be3e1e948effebee844de09a57c52e5bb9a665526b" +dependencies = [ + "anyhow", + "clap", + "qp-plonky2", + "qp-wormhole-aggregator", + "qp-wormhole-circuit", + "qp-zk-circuits-common", +] + +[[package]] +name = "qp-wormhole-inputs" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53ad195630b070fc8cd9d89c55a951abaae9694434793bc87f5ab3045ded7108" +dependencies = [ + "anyhow", +] + +[[package]] +name = "qp-wormhole-prover" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d244e8514279f65d25f15ed5a6e6464905ac5276724a9233574696e11a461c3a" +dependencies = [ + "anyhow", + "qp-plonky2", + "qp-wormhole-circuit", + "qp-wormhole-inputs", + "qp-zk-circuits-common", +] + +[[package]] +name = "qp-zk-circuits-common" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d45c3d80adc2aecbcf27902569d3ec291f5f83e9d7d17ad12530f45102963faa" +dependencies = [ + "anyhow", + "hex", + "qp-plonky2", + "qp-poseidon-core", + "qp-wormhole-inputs", + "rand 0.8.5", + "serde", ] [[package]] @@ -2411,7 +2822,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.16", + "getrandom 0.2.17", ] [[package]] @@ -2429,6 +2840,26 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rayon" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.13.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + [[package]] name = "redox_syscall" version = "0.5.17" @@ -2522,13 +2953,26 @@ checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422" name = "rust_lib_resonance_network_wallet" version = "0.1.0" dependencies = [ + "anyhow", "flutter_rust_bridge", "hex", + "log", "nam-tiny-hderive", + "parity-scale-codec", + "qp-plonky2", "qp-poseidon", + "qp-poseidon-core", "qp-rusty-crystals-dilithium", "qp-rusty-crystals-hdwallet", + "qp-wormhole-aggregator", + "qp-wormhole-circuit", + "qp-wormhole-circuit-builder", + "qp-wormhole-inputs", + "qp-wormhole-prover", + "qp-zk-circuits-common", "quantus_ur", + "serde", + "serde_json", "sp-core 35.0.0", ] @@ -2576,7 +3020,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.11.0", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -2585,12 +3029,6 @@ version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" -[[package]] -name = "ryu" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" - [[package]] name = "same-file" version = "1.0.6" @@ -2701,10 +3139,11 @@ checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" [[package]] name = "serde" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ + "serde_core", "serde_derive", ] @@ -2717,11 +3156,20 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_core" +version = "1.0.228" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", @@ -2730,14 +3178,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.142" +version = "1.0.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" dependencies = [ "itoa", "memchr", - "ryu", "serde", + "serde_core", + "zmij", ] [[package]] @@ -2888,7 +3337,7 @@ dependencies = [ "parity-scale-codec", "parking_lot", "paste", - "primitive-types", + "primitive-types 0.13.1", "rand 0.8.5", "scale-info", "schnorrkel", @@ -2930,7 +3379,7 @@ dependencies = [ "parity-bip39", "parity-scale-codec", "paste", - "primitive-types", + "primitive-types 0.13.1", "scale-info", "schnorrkel", "secrecy", @@ -3056,7 +3505,7 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "polkavm-derive 0.18.0", - "primitive-types", + "primitive-types 0.13.1", "sp-externalities", "sp-runtime-interface-proc-macro 18.0.0", "sp-std", @@ -3076,7 +3525,7 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "polkavm-derive 0.24.0", - "primitive-types", + "primitive-types 0.13.1", "sp-externalities", "sp-runtime-interface-proc-macro 19.0.0", "sp-std", @@ -3252,6 +3701,12 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe895eb47f22e2ddd4dabc02bce419d2e643c8e3b585c78158b349195bc24d82" +[[package]] +name = "strsim" +version = "0.11.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + [[package]] name = "substrate-bip39" version = "0.6.0" @@ -3309,7 +3764,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix 1.1.2", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -3332,11 +3787,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.16" +version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" dependencies = [ - "thiserror-impl 2.0.16", + "thiserror-impl 2.0.18", ] [[package]] @@ -3352,9 +3807,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "2.0.16" +version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" dependencies = [ "proc-macro2", "quote", @@ -3410,6 +3865,15 @@ dependencies = [ "time-core", ] +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + [[package]] name = "tinyvec" version = "1.9.0" @@ -3597,6 +4061,18 @@ version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + [[package]] name = "uint" version = "0.10.0" @@ 
-3630,6 +4106,16 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" +[[package]] +name = "unroll" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ad948c1cb799b1a70f836077721a92a35ac177d4daddf4c20a633786d4cf618" +dependencies = [ + "quote", + "syn 1.0.109", +] + [[package]] name = "ur" version = "0.3.0" @@ -3671,6 +4157,12 @@ dependencies = [ "ur", ] +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + [[package]] name = "valuable" version = "0.1.1" @@ -3811,6 +4303,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "which" version = "4.4.2" @@ -3988,9 +4490,9 @@ dependencies = [ [[package]] name = "zeroize" -version = "1.8.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" dependencies = [ "zeroize_derive", ] @@ -4005,3 +4507,9 @@ dependencies = [ "quote", "syn 2.0.104", ] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/quantus_sdk/rust/Cargo.toml b/quantus_sdk/rust/Cargo.toml index 8c0b4b0f..1d20238e 100644 --- a/quantus_sdk/rust/Cargo.toml +++ b/quantus_sdk/rust/Cargo.toml @@ -8,15 +8,34 @@ crate-type = ["cdylib", "staticlib", "rlib"] [dependencies] # NOTE: 
Quantus chain dependencies. -qp-poseidon = { version = "1.0.1", default-features = false } +qp-poseidon = { version = "1.0.7", default-features = false } +qp-poseidon-core = { version = "1.0.7", default-features = false, features = ["p2", "p3"] } qp-rusty-crystals-dilithium = { version = "2.0.0", default-features = false } -qp-rusty-crystals-hdwallet = { version = "1.0.0" } +qp-rusty-crystals-hdwallet = { version = "1.3.0" } + +# ZK proof generation for wormhole withdrawals +qp-wormhole-circuit = { version = "1.0.7", default-features = false, features = ["std"] } +qp-wormhole-prover = { version = "1.0.7", default-features = false, features = ["std"] } +qp-wormhole-aggregator = { version = "1.0.7", default-features = false, features = ["rayon", "std"] } +qp-wormhole-inputs = { version = "1.0.7", default-features = false, features = ["std"] } +qp-zk-circuits-common = { version = "1.0.7", default-features = false, features = ["std"] } +qp-wormhole-circuit-builder = { version = "1.0.7", default-features = false } +plonky2 = { package = "qp-plonky2", version = "1.1.3", default-features = false, features = ["std"] } + +# Serialization for proof config +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +anyhow = "1.0" flutter_rust_bridge = "=2.11.1" hex = "0.4.3" +log = "0.4" nam-tiny-hderive = "0.3.1-nam.0" sp-core = "35.0.0" quantus_ur = { git = "https://github.com/Quantus-Network/quantus_ur.git", tag = "1.1.0" } +# Substrate codec for storage proof processing +codec = { package = "parity-scale-codec", version = "3.7", features = ["derive"] } + [lints.rust] unexpected_cfgs = { level = "warn", check-cfg = ['cfg(frb_expand)'] } diff --git a/quantus_sdk/rust/src/api/crypto.rs b/quantus_sdk/rust/src/api/crypto.rs index 543eec0b..1b06d1c1 100644 --- a/quantus_sdk/rust/src/api/crypto.rs +++ b/quantus_sdk/rust/src/api/crypto.rs @@ -1,7 +1,7 @@ use nam_tiny_hderive::bip32::ExtendedPrivKey; use qp_poseidon::PoseidonHasher; -use 
qp_rusty_crystals_dilithium::ml_dsa_87; -use qp_rusty_crystals_hdwallet::HDLattice; +use qp_rusty_crystals_dilithium::{ml_dsa_87, SensitiveBytes32}; +use qp_rusty_crystals_hdwallet::derive_key_from_mnemonic; pub use qp_rusty_crystals_hdwallet::HDLatticeError; use sp_core::crypto::{AccountId32, Ss58Codec}; use sp_core::Hasher; @@ -55,26 +55,20 @@ pub fn ss58_to_account_id(s: &str) -> Vec { #[flutter_rust_bridge::frb(sync)] pub fn generate_keypair(mnemonic_str: String) -> Keypair { - let hd_lattice = HDLattice::from_mnemonic(&mnemonic_str, None) - .expect("Failed to process provided mnemonic words"); - - let ml_dsa_keypair = hd_lattice.generate_keys(); + // Use default path for main account derivation + let ml_dsa_keypair = derive_key_from_mnemonic(&mnemonic_str, None, "m/44'/189'/0'/0'/0'") + .expect("Failed to derive keypair from mnemonic"); Keypair::from_ml_dsa(ml_dsa_keypair) } -// pub fn generate_derived_keys(&self, path: &str) -> Result { -// let derived_entropy = self.derive_entropy(path)?; -// Ok(Keypair::generate(&derived_entropy)) -// } - #[flutter_rust_bridge::frb(sync)] -pub fn generate_derived_keypair(mnemonic_str: String, path: &str) -> Result { - let hd_lattice = HDLattice::from_mnemonic(&mnemonic_str, None) - .expect("Failed to process provided mnemonic words"); - hd_lattice.generate_derived_keys(path).map(Keypair::from_ml_dsa) +pub fn generate_derived_keypair( + mnemonic_str: String, + path: &str, +) -> Result { + derive_key_from_mnemonic(&mnemonic_str, None, path).map(Keypair::from_ml_dsa) } - // #[flutter_rust_bridge::frb(sync)] // pub fn seed_from_mnemonic(mnemonic_str: String) -> Vec { // // Note this mirrors our implementation in rusty crystals hdwallet @@ -89,19 +83,28 @@ pub fn generate_derived_keypair(mnemonic_str: String, path: &str) -> Result) -> Keypair { - let ml_dsa_keypair = MlDsaKeypair::generate(&seed); + // Convert Vec to mutable 32-byte array for SensitiveBytes32 + let mut seed_array: [u8; 32] = seed.try_into().expect("Seed must be 
exactly 32 bytes"); + let sensitive_seed = SensitiveBytes32::from(&mut seed_array); + let ml_dsa_keypair = MlDsaKeypair::generate(sensitive_seed); Keypair::from_ml_dsa(ml_dsa_keypair) } #[flutter_rust_bridge::frb(sync)] pub fn sign_message(keypair: &Keypair, message: &[u8], entropy: Option<[u8; 32]>) -> Vec { let ml_dsa_keypair = keypair.to_ml_dsa(); - let signature = ml_dsa_keypair.sign(&message, None, entropy); - signature.as_slice().to_vec() + let signature = ml_dsa_keypair + .sign(message, None, entropy) + .expect("Signing should not fail"); + signature.to_vec() } #[flutter_rust_bridge::frb(sync)] -pub fn sign_message_with_pubkey(keypair: &Keypair, message: &[u8], entropy: Option<[u8; 32]>) -> Vec { +pub fn sign_message_with_pubkey( + keypair: &Keypair, + message: &[u8], + entropy: Option<[u8; 32]>, +) -> Vec { let signature = sign_message(keypair, message, entropy); let mut result = Vec::with_capacity(signature.len() + keypair.public_key.len()); result.extend_from_slice(&signature); @@ -222,4 +225,18 @@ mod tests { let is_valid = verify_message(&keypair, message, &signature); assert!(is_valid, "Signature verification failed for long message"); } + + #[test] + fn test_ss58_to_account_id() { + // Test with a Quantus address (prefix 189) + let addr = "qzjUYyuN4L3HKmBPMxHvK2n8HYnaLZcQvLSQTgdwB2nQ1g2mc"; + let bytes = ss58_to_account_id(addr); + assert_eq!(bytes.len(), 32, "Account ID should be 32 bytes"); + println!("Account bytes: 0x{}", hex::encode(&bytes)); + // Verify it's not all zeros or ones + assert!( + bytes.iter().any(|&b| b != 0 && b != 1), + "Account bytes should not be trivial" + ); + } } diff --git a/quantus_sdk/rust/src/api/mod.rs b/quantus_sdk/rust/src/api/mod.rs index 0db98d21..111132f7 100644 --- a/quantus_sdk/rust/src/api/mod.rs +++ b/quantus_sdk/rust/src/api/mod.rs @@ -1,2 +1,3 @@ pub mod crypto; -pub mod ur; \ No newline at end of file +pub mod ur; +pub mod wormhole; diff --git a/quantus_sdk/rust/src/api/ur.rs b/quantus_sdk/rust/src/api/ur.rs 
index a8dbbc19..b63cd852 100644 --- a/quantus_sdk/rust/src/api/ur.rs +++ b/quantus_sdk/rust/src/api/ur.rs @@ -2,13 +2,13 @@ /// use quantus_ur::{decode_bytes, encode_bytes, is_complete}; -// Note decode_ur takes the list of QR Codes in any order and assembles them correctly. -// It also deals with the weird elements that are created in the UR standard when we exceed the number -// of segments. -// For example if you have 3 segments, and the scanner scans all 3 but doesn't succeed, subsequent parts +// Note decode_ur takes the list of QR Codes in any order and assembles them correctly. +// It also deals with the weird elements that are created in the UR standard when we exceed the number +// of segments. +// For example if you have 3 segments, and the scanner scans all 3 but doesn't succeed, subsequent parts // are sent with strange numbers like /412-3/ which are encoded with pieces of the previous segments so that -// the algorithm recovers faster than just repeating the segments over and over. This is described in the UR -// standard. FYI. +// the algorithm recovers faster than just repeating the segments over and over. This is described in the UR +// standard. FYI. 
#[flutter_rust_bridge::frb(sync)] pub fn decode_ur(ur_parts: Vec) -> Result, String> { decode_bytes(&ur_parts).map_err(|e| e.to_string()) @@ -32,10 +32,10 @@ mod tests { fn test_single_part_roundtrip() { let hex_payload = "0200007416854906f03a9dff66e3270a736c44e15970ac03a638471523a03069f276ca0700e876481755010000007400000002000000"; let payload_bytes = hex::decode(hex_payload).expect("Hex decode failed"); - + let encoded_parts = encode_ur(payload_bytes.clone()).expect("Encoding failed"); assert_eq!(encoded_parts.len(), 1, "Should be single part"); - + let decoded_bytes = decode_ur(encoded_parts).expect("Decoding failed"); assert_eq!(decoded_bytes, payload_bytes); } @@ -44,10 +44,10 @@ mod tests { fn test_multi_part_roundtrip() { let hex_payload = "0200007416854906f03a9dff66e3270a736c44e15970ac03a638471523a03069f276ca0700e876481755010000007400000002000000".repeat(10); let payload_bytes = hex::decode(&hex_payload).expect("Hex decode failed"); - + let encoded_parts = encode_ur(payload_bytes.clone()).expect("Encoding failed"); assert!(encoded_parts.len() > 1, "Should be multiple parts"); - + let decoded_bytes = decode_ur(encoded_parts).expect("Decoding failed"); assert_eq!(decoded_bytes, payload_bytes); } @@ -57,8 +57,11 @@ mod tests { let hex_payload = "0200007416854906f03a9dff66e3270a736c44e15970ac03a638471523a03069f276ca0700e876481755010000007400000002000000"; let payload_bytes = hex::decode(hex_payload).expect("Hex decode failed"); let encoded_parts = encode_ur(payload_bytes).expect("Encoding failed"); - - assert!(is_complete_ur(encoded_parts), "Single part should be complete"); + + assert!( + is_complete_ur(encoded_parts), + "Single part should be complete" + ); } #[test] @@ -66,8 +69,11 @@ mod tests { let hex_payload = "0200007416854906f03a9dff66e3270a736c44e15970ac03a638471523a03069f276ca0700e876481755010000007400000002000000".repeat(10); let payload_bytes = hex::decode(&hex_payload).expect("Hex decode failed"); let encoded_parts = 
encode_ur(payload_bytes).expect("Encoding failed"); - - assert!(is_complete_ur(encoded_parts), "All parts should be complete"); + + assert!( + is_complete_ur(encoded_parts), + "All parts should be complete" + ); } #[test] @@ -75,11 +81,14 @@ mod tests { let hex_payload = "0200007416854906f03a9dff66e3270a736c44e15970ac03a638471523a03069f276ca0700e876481755010000007400000002000000".repeat(10); let payload_bytes = hex::decode(&hex_payload).expect("Hex decode failed"); let encoded_parts = encode_ur(payload_bytes).expect("Encoding failed"); - + assert!(encoded_parts.len() > 1, "Should have multiple parts"); - + let incomplete_parts = vec![encoded_parts[0].clone()]; - assert!(!is_complete_ur(incomplete_parts), "Incomplete parts should return false"); + assert!( + !is_complete_ur(incomplete_parts), + "Incomplete parts should return false" + ); } #[test] @@ -87,18 +96,23 @@ mod tests { let hex_payload = "0200007416854906f03a9dff66e3270a736c44e15970ac03a638471523a03069f276ca0700e876481755010000007400000002000000".repeat(10); let payload_bytes = hex::decode(&hex_payload).expect("Hex decode failed"); let encoded_parts = encode_ur(payload_bytes.clone()).expect("Encoding failed"); - + assert!(encoded_parts.len() > 1, "Should be multiple parts"); - + let mut scrambled_parts = encoded_parts.clone(); scrambled_parts.reverse(); let mid = scrambled_parts.len() / 2; scrambled_parts.swap(0, mid); - + let decoded_bytes = decode_ur(scrambled_parts.clone()).expect("Decoding failed"); - assert_eq!(decoded_bytes, payload_bytes, "Decoding should work regardless of part order"); - - assert!(is_complete_ur(scrambled_parts), "Scrambled parts should still be complete"); - } + assert_eq!( + decoded_bytes, payload_bytes, + "Decoding should work regardless of part order" + ); + assert!( + is_complete_ur(scrambled_parts), + "Scrambled parts should still be complete" + ); + } } diff --git a/quantus_sdk/rust/src/api/wormhole.rs b/quantus_sdk/rust/src/api/wormhole.rs new file mode 100644 index 
00000000..f49b42c7 --- /dev/null +++ b/quantus_sdk/rust/src/api/wormhole.rs @@ -0,0 +1,1416 @@ +//! Wormhole address derivation and utilities for ZK proof-based token spending. +//! +//! This module provides functionality to: +//! - Derive wormhole addresses from a mnemonic using HD derivation +//! - Convert wormhole preimages to SS58 addresses +//! +//! ## Wormhole Address Derivation +//! +//! Wormhole addresses are derived using a two-step Poseidon hash: +//! 1. `first_hash` = Poseidon(salt || secret) where salt = "wormhole" +//! 2. `address` = Poseidon(first_hash) +//! +//! The `first_hash` is used as the rewards preimage (passed to the node via --rewards-preimage). +//! The `address` is the actual on-chain account that receives funds. +//! +//! ## HD Path Convention +//! +//! Wormhole secrets are derived using BIP44-style paths: +//! - Coin type: 189189189' (QUANTUS_WORMHOLE_CHAIN_ID) +//! - Full path: m/44'/189189189'/0'/{purpose}'/{index}' +//! +//! Purpose values: +//! - 0 = Mobile app wormhole sends (future) +//! 
- 1 = Miner rewards + +use plonky2::field::types::PrimeField64; +use qp_rusty_crystals_hdwallet::{ + derive_wormhole_from_mnemonic, WormholePair, QUANTUS_WORMHOLE_CHAIN_ID, +}; +use sp_core::crypto::{AccountId32, Ss58Codec}; + +/// Result of wormhole pair derivation +#[flutter_rust_bridge::frb(sync)] +pub struct WormholePairResult { + /// The wormhole address as SS58 (the on-chain account) + pub address: String, + /// The raw address bytes (32 bytes, hex encoded) + pub address_hex: String, + /// The first hash / rewards preimage as SS58 (pass to --rewards-preimage) + pub first_hash_ss58: String, + /// The first hash / rewards preimage bytes (32 bytes, hex encoded) + pub first_hash_hex: String, + /// The secret bytes (32 bytes, hex encoded) - SENSITIVE, needed for ZK proofs + pub secret_hex: String, +} + +impl From for WormholePairResult { + fn from(pair: WormholePair) -> Self { + let account = AccountId32::from(pair.address); + let first_hash_account = AccountId32::from(pair.first_hash); + + WormholePairResult { + address: account.to_ss58check(), + address_hex: format!("0x{}", hex::encode(pair.address)), + first_hash_ss58: first_hash_account.to_ss58check(), + first_hash_hex: format!("0x{}", hex::encode(pair.first_hash)), + secret_hex: format!("0x{}", hex::encode(pair.secret)), + } + } +} + +/// Error type for wormhole operations +#[flutter_rust_bridge::frb(sync)] +#[derive(Debug)] +pub struct WormholeError { + pub message: String, +} + +impl WormholeError { + /// Returns the error message as a string for display. + #[flutter_rust_bridge::frb(sync, name = "toString")] + pub fn to_display_string(&self) -> String { + format!("WormholeError: {}", self.message) + } +} + +impl std::fmt::Display for WormholeError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.message) + } +} + +impl std::error::Error for WormholeError {} + +/// Derive a wormhole address pair from a mnemonic. 
+/// +/// # Arguments +/// * `mnemonic` - The 24-word BIP39 mnemonic phrase +/// * `purpose` - The purpose index (0 = mobile sends, 1 = miner rewards) +/// * `index` - The address index within the purpose +/// +/// # Returns +/// A `WormholePairResult` containing the address, first_hash, and secret. +/// +/// # Example +/// ```ignore +/// let result = derive_wormhole_pair( +/// "word1 word2 ... word24".to_string(), +/// 1, // purpose: miner rewards +/// 0, // index: first address +/// )?; +/// println!("Rewards preimage (for --rewards-preimage): {}", result.first_hash_ss58); +/// println!("Wormhole address (on-chain account): {}", result.address); +/// ``` +#[flutter_rust_bridge::frb(sync)] +pub fn derive_wormhole_pair( + mnemonic: String, + purpose: u32, + index: u32, +) -> Result { + // Build the HD path: m/44'/189189189'/0'/{purpose}'/{index}' + // Note: QUANTUS_WORMHOLE_CHAIN_ID already includes the apostrophe (189189189') + let path = format!( + "m/44'/{}/0'/{}'/{}'", + QUANTUS_WORMHOLE_CHAIN_ID, purpose, index + ); + + let pair = + derive_wormhole_from_mnemonic(&mnemonic, None, &path).map_err(|e| WormholeError { + message: format!("Failed to derive wormhole pair: {:?}", e), + })?; + + Ok(pair.into()) +} + +/// Convert a first_hash (rewards preimage) to its corresponding wormhole address. +/// +/// This computes the address exactly as the chain and ZK circuit do: +/// - Convert first_hash (32 bytes) to 4 field elements using unsafe_digest_bytes_to_felts +/// (8 bytes per element) +/// - Hash once without padding using hash_variable_length +/// +/// The wormhole address derivation is: +/// - secret -> hash(salt + secret) = first_hash (preimage for node) +/// - first_hash -> hash(first_hash) = address +/// +/// # Arguments +/// * `first_hash_hex` - The first_hash bytes as hex string (with or without 0x prefix) +/// +/// # Returns +/// The wormhole address as SS58 string. 
+#[flutter_rust_bridge::frb(sync)] +pub fn first_hash_to_address(first_hash_hex: String) -> Result { + let hex_str = first_hash_hex.trim_start_matches("0x"); + let first_hash_bytes: [u8; 32] = hex::decode(hex_str) + .map_err(|e| WormholeError { + message: format!("Invalid hex string: {}", e), + })? + .try_into() + .map_err(|_| WormholeError { + message: "First hash must be exactly 32 bytes".to_string(), + })?; + + // The address is hash(first_hash) using the same method as the ZK circuit: + // - unsafe_digest_bytes_to_felts: 32 bytes -> 4 field elements (8 bytes each) + // - hash_variable_length: hash without padding + use qp_poseidon_core::{hash_variable_length, serialization::unsafe_digest_bytes_to_felts}; + + let first_hash_felts = unsafe_digest_bytes_to_felts(&first_hash_bytes); + let address_bytes = hash_variable_length(first_hash_felts.to_vec()); + + let account = AccountId32::from(address_bytes); + Ok(account.to_ss58check()) +} + +/// Get the wormhole HD derivation path for a given purpose and index. +/// +/// # Arguments +/// * `purpose` - The purpose index (0 = mobile sends, 1 = miner rewards) +/// * `index` - The address index within the purpose +/// +/// # Returns +/// The full HD derivation path string. +#[flutter_rust_bridge::frb(sync)] +pub fn get_wormhole_derivation_path(purpose: u32, index: u32) -> String { + format!( + "m/44'/{}/0'/{}'/{}'", + QUANTUS_WORMHOLE_CHAIN_ID, purpose, index + ) +} + +/// Constants for wormhole derivation purposes +pub mod wormhole_purpose { + /// Mobile app wormhole sends (future feature) + pub const MOBILE_SENDS: u32 = 0; + /// Miner rewards + pub const MINER_REWARDS: u32 = 1; +} + +// ============================================================================ +// Proof Generation Types and Functions +// ============================================================================ + +/// A wormhole UTXO (unspent transaction output) - FFI-friendly version. 
+/// +/// Represents an unspent wormhole deposit that can be used as input +/// for generating a proof. +#[flutter_rust_bridge::frb(sync)] +#[derive(Debug, Clone)] +pub struct WormholeUtxo { + /// The secret used to derive the wormhole address (hex encoded with 0x prefix). + pub secret_hex: String, + /// Amount in planck (12 decimal places). + pub amount: u64, // Using u64 for FFI compatibility (actual is u128 but rewards are small) + /// Transfer count from the NativeTransferred event. + pub transfer_count: u64, + /// The funding account (sender of the original transfer) - hex encoded. + pub funding_account_hex: String, + /// Block hash where the transfer was recorded - hex encoded. + pub block_hash_hex: String, +} + +/// Output assignment for a proof - where the funds go. +#[flutter_rust_bridge::frb(sync)] +#[derive(Debug, Clone)] +pub struct ProofOutputAssignment { + /// Amount for output 1 (quantized to 2 decimal places). + pub output_amount_1: u32, + /// Exit account for output 1 (SS58 address). + pub exit_account_1: String, + /// Amount for output 2 (quantized, 0 if unused). + pub output_amount_2: u32, + /// Exit account for output 2 (SS58 address, empty if unused). + pub exit_account_2: String, +} + +/// Block header data needed for proof generation. +#[flutter_rust_bridge::frb(sync)] +#[derive(Debug, Clone)] +pub struct BlockHeaderData { + /// Parent block hash (hex encoded). + pub parent_hash_hex: String, + /// State root of the block (hex encoded). + pub state_root_hex: String, + /// Extrinsics root of the block (hex encoded). + pub extrinsics_root_hex: String, + /// Block number. + pub block_number: u32, + /// Encoded digest (hex encoded, up to 110 bytes). + pub digest_hex: String, +} + +/// Storage proof data for the transfer. +#[flutter_rust_bridge::frb(sync)] +#[derive(Debug, Clone)] +pub struct StorageProofData { + /// Raw proof nodes from the state trie (each node is hex encoded). 
+ pub proof_nodes_hex: Vec, + /// State root the proof is against (hex encoded). + pub state_root_hex: String, +} + +/// Configuration loaded from circuit binaries directory. +#[flutter_rust_bridge::frb(sync)] +#[derive(Debug, Clone, serde::Deserialize)] +pub struct CircuitConfig { + /// Number of leaf proofs in an aggregation batch. + pub num_leaf_proofs: usize, +} + +impl CircuitConfig { + /// Load configuration from a circuit binaries directory. + pub fn load(bins_dir: &str) -> Result { + let config_path = std::path::Path::new(bins_dir).join("config.json"); + let config_str = std::fs::read_to_string(&config_path).map_err(|e| WormholeError { + message: format!( + "Failed to read config from {}: {}", + config_path.display(), + e + ), + })?; + + serde_json::from_str(&config_str).map_err(|e| WormholeError { + message: format!("Failed to parse config: {}", e), + }) + } +} + +/// Result of proof generation. +#[flutter_rust_bridge::frb(sync)] +#[derive(Debug, Clone)] +pub struct GeneratedProof { + /// The serialized proof bytes (hex encoded). + pub proof_hex: String, + /// The nullifier for this UTXO (hex encoded) - used to track spent UTXOs. + pub nullifier_hex: String, +} + +/// Result of proof aggregation. +#[flutter_rust_bridge::frb(sync)] +#[derive(Debug, Clone)] +pub struct AggregatedProof { + /// The serialized aggregated proof bytes (hex encoded). + pub proof_hex: String, + /// Number of real proofs in the batch (rest are dummies). + pub num_real_proofs: usize, +} + +/// Compute the nullifier for a wormhole UTXO. +/// +/// The nullifier is a deterministic hash of (secret, transfer_count) that prevents +/// double-spending. Once revealed on-chain, the UTXO cannot be spent again. +/// +/// # Arguments +/// * `secret_hex` - The wormhole secret (32 bytes, hex with 0x prefix) +/// * `transfer_count` - The transfer count from NativeTransferred event +/// +/// # Returns +/// The nullifier as hex string with 0x prefix. 
+#[flutter_rust_bridge::frb(sync)] +pub fn compute_nullifier(secret_hex: String, transfer_count: u64) -> Result { + let secret_bytes = parse_hex_32(&secret_hex)?; + let secret: qp_zk_circuits_common::utils::BytesDigest = + secret_bytes.try_into().map_err(|e| WormholeError { + message: format!("Invalid secret bytes: {:?}", e), + })?; + + let nullifier = + qp_wormhole_circuit::nullifier::Nullifier::from_preimage(secret, transfer_count); + let nullifier_bytes = qp_zk_circuits_common::utils::digest_felts_to_bytes(nullifier.hash); + + Ok(format!("0x{}", hex::encode(nullifier_bytes.as_ref()))) +} + +/// Derive the wormhole address from a secret. +/// +/// This computes the unspendable account address that corresponds to the given secret. +/// +/// # Arguments +/// * `secret_hex` - The wormhole secret (32 bytes, hex with 0x prefix) +/// +/// # Returns +/// The wormhole address as SS58 string. +#[flutter_rust_bridge::frb(sync)] +pub fn derive_address_from_secret(secret_hex: String) -> Result { + let secret_bytes = parse_hex_32(&secret_hex)?; + let secret: qp_zk_circuits_common::utils::BytesDigest = + secret_bytes.try_into().map_err(|e| WormholeError { + message: format!("Invalid secret bytes: {:?}", e), + })?; + + let unspendable = + qp_wormhole_circuit::unspendable_account::UnspendableAccount::from_secret(secret); + let address_bytes = qp_zk_circuits_common::utils::digest_felts_to_bytes(unspendable.account_id); + + let account = AccountId32::from( + <[u8; 32]>::try_from(address_bytes.as_ref()).expect("BytesDigest is always 32 bytes"), + ); + Ok(account.to_ss58check()) +} + +/// Quantize an amount from planck (12 decimals) to the circuit format (2 decimals). +/// +/// The circuit uses quantized amounts for privacy. This function converts +/// a full-precision amount to the quantized format. 
+/// +/// # Arguments +/// * `amount_planck` - Amount in planck (smallest unit, 12 decimal places) +/// +/// # Returns +/// Quantized amount (2 decimal places) that can be used in proof outputs. +#[flutter_rust_bridge::frb(sync)] +pub fn quantize_amount(amount_planck: u64) -> Result { + // 12 decimals to 2 decimals = divide by 10^10 + const QUANTIZATION_FACTOR: u64 = 10_000_000_000; // 10^10 + + let quantized = amount_planck / QUANTIZATION_FACTOR; + + if quantized > u32::MAX as u64 { + return Err(WormholeError { + message: format!("Amount too large to quantize: {}", amount_planck), + }); + } + + Ok(quantized as u32) +} + +/// Dequantize an amount from circuit format (2 decimals) back to planck (12 decimals). +/// +/// # Arguments +/// * `quantized_amount` - Amount in circuit format (2 decimal places) +/// +/// # Returns +/// Amount in planck (12 decimal places). +#[flutter_rust_bridge::frb(sync)] +pub fn dequantize_amount(quantized_amount: u32) -> u64 { + const QUANTIZATION_FACTOR: u64 = 10_000_000_000; // 10^10 + quantized_amount as u64 * QUANTIZATION_FACTOR +} + +/// Compute the output amount after fee deduction. +/// +/// The circuit enforces that output amounts don't exceed input minus fee. +/// Use this function to compute the correct output amount for proof generation. +/// +/// Formula: `output = input * (10000 - fee_bps) / 10000` +/// +/// # Arguments +/// * `input_amount` - Input amount in quantized units (from quantize_amount) +/// * `fee_bps` - Fee rate in basis points (e.g., 10 = 0.1%) +/// +/// # Returns +/// Maximum output amount in quantized units. 
+/// +/// # Example +/// ```ignore +/// let input = quantize_amount(383561629241)?; // 38 in quantized +/// let output = compute_output_amount(input, 10); // 37 (after 0.1% fee) +/// ``` +#[flutter_rust_bridge::frb(sync)] +pub fn compute_output_amount(input_amount: u32, fee_bps: u32) -> u32 { + ((input_amount as u64) * (10000 - fee_bps as u64) / 10000) as u32 +} + +/// Get the batch size for proof aggregation. +/// +/// # Arguments +/// * `bins_dir` - Path to circuit binaries directory +/// +/// # Returns +/// Number of proofs that must be aggregated together. +#[flutter_rust_bridge::frb(sync)] +pub fn get_aggregation_batch_size(bins_dir: String) -> Result { + let config = CircuitConfig::load(&bins_dir)?; + Ok(config.num_leaf_proofs) +} + +/// Encode digest logs from RPC format to SCALE-encoded bytes. +/// +/// The RPC returns digest logs as an array of hex-encoded SCALE bytes. +/// This function properly encodes them as a SCALE Vec which +/// matches what the circuit expects. +/// +/// # Arguments +/// * `logs_hex` - Array of hex-encoded digest log items from RPC +/// +/// # Returns +/// SCALE-encoded digest as hex string (with 0x prefix), padded/truncated to 110 bytes. 
+/// +/// # Example +/// ```ignore +/// // From RPC: header.digest.logs = ["0x0642...", "0x0561..."] +/// let digest_hex = encode_digest_from_rpc_logs(vec!["0x0642...".into(), "0x0561...".into()])?; +/// ``` +#[flutter_rust_bridge::frb(sync)] +pub fn encode_digest_from_rpc_logs(logs_hex: Vec) -> Result { + use codec::Encode; + + // Each log is already a SCALE-encoded DigestItem + // We need to encode them as Vec: compact(length) ++ items + let mut encoded = Vec::new(); + + // Encode compact length prefix + codec::Compact(logs_hex.len() as u32).encode_to(&mut encoded); + + // Append each log's raw bytes + for log_hex in &logs_hex { + let log_bytes = parse_hex(log_hex)?; + encoded.extend_from_slice(&log_bytes); + } + + // Pad or truncate to exactly 110 bytes (DIGEST_LOGS_SIZE) + const DIGEST_LOGS_SIZE: usize = 110; + let mut result = [0u8; DIGEST_LOGS_SIZE]; + let copy_len = encoded.len().min(DIGEST_LOGS_SIZE); + result[..copy_len].copy_from_slice(&encoded[..copy_len]); + + Ok(format!("0x{}", hex::encode(result))) +} + +/// Compute the full storage key for a wormhole TransferProof. +/// +/// This key can be used with `state_getReadProof` RPC to fetch the Merkle proof +/// needed for ZK proof generation. +/// +/// The storage key is: module_prefix ++ storage_prefix ++ poseidon_hash(key) +/// +/// # Arguments +/// * `secret_hex` - The wormhole secret (32 bytes, hex with 0x prefix) +/// * `transfer_count` - The transfer count from NativeTransferred event +/// * `funding_account` - The account that sent the funds (SS58 format) +/// * `amount` - The exact transfer amount in planck +/// +/// # Returns +/// The full storage key as hex string with 0x prefix. 
+#[flutter_rust_bridge::frb(sync)] +pub fn compute_transfer_proof_storage_key( + secret_hex: String, + transfer_count: u64, + funding_account: String, + amount: u64, +) -> Result { + // Compute wormhole address from secret + let secret_bytes = parse_hex_32(&secret_hex)?; + let secret_digest: qp_zk_circuits_common::utils::BytesDigest = + secret_bytes.try_into().map_err(|e| WormholeError { + message: format!("Invalid secret: {:?}", e), + })?; + + let unspendable = + qp_wormhole_circuit::unspendable_account::UnspendableAccount::from_secret(secret_digest); + let unspendable_bytes = + qp_zk_circuits_common::utils::digest_felts_to_bytes(unspendable.account_id); + let wormhole_address: [u8; 32] = unspendable_bytes + .as_ref() + .try_into() + .expect("BytesDigest is always 32 bytes"); + + // Parse funding account + let funding_account_bytes = ss58_to_bytes(&funding_account)?; + + // Compute the Poseidon hash of the storage key + let leaf_hash = compute_transfer_proof_leaf_hash( + 0, // asset_id = 0 for native token + transfer_count, + &funding_account_bytes, + &wormhole_address, + amount as u128, + )?; + + // Build the full storage key: + // twox128("Wormhole") ++ twox128("TransferProof") ++ poseidon_hash + // + // Pre-computed twox128 hashes: + // twox128("Wormhole") = 0x1cbfc5e0de51116eb98c56a3b9fd8c8b + // twox128("TransferProof") = 0x4a4ee9c5fb3e0a4c6f3b6daa9b1c7b28 + // + // Note: These hashes are computed using xxhash and are deterministic. + // Using the standard Substrate storage prefix computation. 
+ use sp_core::twox_128; + + let module_prefix = twox_128(b"Wormhole"); + let storage_prefix = twox_128(b"TransferProof"); + + let mut full_key = Vec::with_capacity(32 + 32); + full_key.extend_from_slice(&module_prefix); + full_key.extend_from_slice(&storage_prefix); + full_key.extend_from_slice(&leaf_hash); + + Ok(format!("0x{}", hex::encode(full_key))) +} + +// ============================================================================ +// Proof Generator - Stateful wrapper for proof generation +// ============================================================================ + +use std::sync::Mutex; + +/// Opaque handle to a proof generator. +/// +/// The generator is expensive to initialize (loads ~171MB of circuit data), +/// so it should be created once and reused for all proof generations. +pub struct WormholeProofGenerator { + pub bins_dir: String, +} + +impl WormholeProofGenerator { + /// Create a new proof generator from circuit files. + /// + /// # Arguments + /// * `bins_dir` - Path to directory containing prover.bin and common.bin + /// + /// # Returns + /// A new proof generator instance. + pub fn new(bins_dir: String) -> Result { + // Verify the circuit files exist + let bins_path = std::path::Path::new(&bins_dir); + let prover_path = bins_path.join("prover.bin"); + let common_path = bins_path.join("common.bin"); + + if !prover_path.exists() { + return Err(WormholeError { + message: format!("prover.bin not found at {:?}", prover_path), + }); + } + if !common_path.exists() { + return Err(WormholeError { + message: format!("common.bin not found at {:?}", common_path), + }); + } + + Ok(Self { bins_dir }) + } + + /// Generate a proof for a wormhole withdrawal. 
+ /// + /// # Arguments + /// * `utxo` - The UTXO to spend + /// * `output` - Where to send the funds + /// * `fee_bps` - Fee in basis points + /// * `block_header` - Block header for the proof + /// * `storage_proof` - Storage proof for the transfer + /// + /// # Returns + /// The generated proof and nullifier. + pub fn generate_proof( + &self, + utxo: WormholeUtxo, + output: ProofOutputAssignment, + fee_bps: u32, + block_header: BlockHeaderData, + storage_proof: StorageProofData, + ) -> Result { + // Parse all hex inputs + let secret = parse_hex_32(&utxo.secret_hex)?; + let funding_account = parse_hex_32(&utxo.funding_account_hex)?; + // Use the actual block hash from the chain (from the UTXO), not a computed one. + // The circuit will verify this matches the hash of the header components. + let block_hash = parse_hex_32(&utxo.block_hash_hex)?; + + let parent_hash = parse_hex_32(&block_header.parent_hash_hex)?; + let state_root = parse_hex_32(&block_header.state_root_hex)?; + let extrinsics_root = parse_hex_32(&block_header.extrinsics_root_hex)?; + let digest = parse_hex(&block_header.digest_hex)?; + + let exit_account_1 = ss58_to_bytes(&output.exit_account_1)?; + let exit_account_2 = if output.exit_account_2.is_empty() { + [0u8; 32] + } else { + ss58_to_bytes(&output.exit_account_2)? 
+ }; + + // Compute nullifier + let secret_digest: qp_zk_circuits_common::utils::BytesDigest = + secret.try_into().map_err(|e| WormholeError { + message: format!("Invalid secret: {:?}", e), + })?; + let nullifier = qp_wormhole_circuit::nullifier::Nullifier::from_preimage( + secret_digest, + utxo.transfer_count, + ); + let nullifier_bytes = qp_zk_circuits_common::utils::digest_felts_to_bytes(nullifier.hash); + + // Compute unspendable account + let unspendable = qp_wormhole_circuit::unspendable_account::UnspendableAccount::from_secret( + secret_digest, + ); + let unspendable_bytes = + qp_zk_circuits_common::utils::digest_felts_to_bytes(unspendable.account_id); + + // Process storage proof + let proof_nodes: Vec> = storage_proof + .proof_nodes_hex + .iter() + .map(|h| parse_hex(h)) + .collect::>()?; + let storage_state_root = parse_hex_32(&storage_proof.state_root_hex)?; + + let wormhole_address: [u8; 32] = unspendable_bytes + .as_ref() + .try_into() + .expect("BytesDigest is always 32 bytes"); + + let processed_proof = qp_zk_circuits_common::storage_proof::prepare_proof_for_circuit( + proof_nodes, + hex::encode(storage_state_root), + compute_transfer_proof_leaf_hash( + 0, // asset_id = 0 for native token + utxo.transfer_count, + &funding_account, + &wormhole_address, + utxo.amount as u128, + )?, + ) + .map_err(|e| WormholeError { + message: format!("Storage proof preparation failed: {}", e), + })?; + + // Quantize input amount + let input_amount_quantized = quantize_amount(utxo.amount)?; + + // Prepare digest (padded to 110 bytes) + const DIGEST_LOGS_SIZE: usize = 110; + let mut digest_padded = [0u8; DIGEST_LOGS_SIZE]; + let copy_len = digest.len().min(DIGEST_LOGS_SIZE); + digest_padded[..copy_len].copy_from_slice(&digest[..copy_len]); + + // NOTE: We use the actual block_hash from the UTXO (parsed above), not a computed one. + // The circuit will verify that hash(header_components) == block_hash. 
+ + // Build circuit inputs + let private = + qp_wormhole_circuit::inputs::PrivateCircuitInputs { + secret: secret_digest, + transfer_count: utxo.transfer_count, + funding_account: funding_account.as_slice().try_into().map_err(|e| { + WormholeError { + message: format!("Invalid funding account: {:?}", e), + } + })?, + storage_proof: processed_proof, + unspendable_account: unspendable_bytes, + parent_hash: parent_hash + .as_slice() + .try_into() + .map_err(|e| WormholeError { + message: format!("Invalid parent hash: {:?}", e), + })?, + state_root: state_root + .as_slice() + .try_into() + .map_err(|e| WormholeError { + message: format!("Invalid state root: {:?}", e), + })?, + extrinsics_root: extrinsics_root.as_slice().try_into().map_err(|e| { + WormholeError { + message: format!("Invalid extrinsics root: {:?}", e), + } + })?, + digest: digest_padded, + input_amount: input_amount_quantized, + }; + + let public = + qp_wormhole_inputs::PublicCircuitInputs { + asset_id: 0, // Native token + output_amount_1: output.output_amount_1, + output_amount_2: output.output_amount_2, + volume_fee_bps: fee_bps, + nullifier: nullifier_bytes, + exit_account_1: exit_account_1.as_slice().try_into().map_err(|e| { + WormholeError { + message: format!("Invalid exit account 1: {:?}", e), + } + })?, + exit_account_2: exit_account_2.as_slice().try_into().map_err(|e| { + WormholeError { + message: format!("Invalid exit account 2: {:?}", e), + } + })?, + block_hash: block_hash + .as_slice() + .try_into() + .map_err(|e| WormholeError { + message: format!("Invalid block hash: {:?}", e), + })?, + block_number: block_header.block_number, + }; + + let circuit_inputs = qp_wormhole_circuit::inputs::CircuitInputs { public, private }; + + // Clone prover and generate proof + let prover = self.clone_prover()?; + let prover_with_inputs = prover.commit(&circuit_inputs).map_err(|e| WormholeError { + message: format!("Failed to commit inputs: {}", e), + })?; + let proof = 
prover_with_inputs.prove().map_err(|e| WormholeError { + message: format!("Proof generation failed: {}", e), + })?; + + Ok(GeneratedProof { + proof_hex: format!("0x{}", hex::encode(proof.to_bytes())), + nullifier_hex: format!("0x{}", hex::encode(nullifier_bytes.as_ref())), + }) + } + + /// Clone the internal prover by reloading from files. + fn clone_prover(&self) -> Result { + let bins_path = std::path::Path::new(&self.bins_dir); + let prover_path = bins_path.join("prover.bin"); + let common_path = bins_path.join("common.bin"); + + qp_wormhole_prover::WormholeProver::new_from_files(&prover_path, &common_path).map_err( + |e| WormholeError { + message: format!("Failed to reload prover: {}", e), + }, + ) + } +} + +// ============================================================================ +// Proof Aggregator +// ============================================================================ + +// Re-import the plonky2 types via qp_zk_circuits_common +use qp_zk_circuits_common::circuit::{C, D, F}; +// Import plonky2 types for proof handling (qp-plonky2 is aliased as plonky2 in Cargo.toml) +// Use the same import paths as qp-wormhole-aggregator for type compatibility +use plonky2::plonk::circuit_data::CommonCircuitData; +use plonky2::plonk::proof::ProofWithPublicInputs; + +/// Opaque handle to a proof aggregator. +/// +/// The aggregator collects proofs and aggregates them into a single proof +/// for more efficient on-chain verification. +pub struct WormholeProofAggregator { + inner: Mutex, + common_data: CommonCircuitData, + batch_size: usize, +} + +impl WormholeProofAggregator { + /// Create a new proof aggregator from circuit files. + /// + /// # Arguments + /// * `bins_dir` - Path to directory containing aggregator circuit files + /// + /// # Returns + /// A new proof aggregator instance. 
+ pub fn new(bins_dir: String) -> Result { + let bins_path = std::path::Path::new(&bins_dir); + + // Load config to get batch size + let config = CircuitConfig::load(&bins_dir)?; + let agg_config = + qp_zk_circuits_common::aggregation::AggregationConfig::new(config.num_leaf_proofs); + + let aggregator = + qp_wormhole_aggregator::aggregator::WormholeProofAggregator::from_prebuilt_dir( + bins_path, agg_config, + ) + .map_err(|e| WormholeError { + message: format!("Failed to load aggregator from {:?}: {}", bins_dir, e), + })?; + + let common_data = aggregator.leaf_circuit_data.common.clone(); + let batch_size = config.num_leaf_proofs; + + Ok(Self { + inner: Mutex::new(aggregator), + common_data, + batch_size, + }) + } + + /// Get the batch size (number of proofs per aggregation). + pub fn batch_size(&self) -> usize { + self.batch_size + } + + /// Get the number of proofs currently in the buffer. + pub fn proof_count(&self) -> Result { + let aggregator = self.inner.lock().map_err(|e| WormholeError { + message: format!("Failed to lock aggregator: {}", e), + })?; + Ok(aggregator + .proofs_buffer + .as_ref() + .map(|b| b.len()) + .unwrap_or(0)) + } + + /// Add a proof to the aggregation buffer. 
+ /// + /// # Arguments + /// * `proof_hex` - The serialized proof bytes (hex encoded with 0x prefix) + pub fn add_proof(&self, proof_hex: String) -> Result<(), WormholeError> { + let proof_bytes = parse_hex(&proof_hex)?; + + let proof = ProofWithPublicInputs::::from_bytes(proof_bytes, &self.common_data) + .map_err(|e| WormholeError { + message: format!("Failed to deserialize proof: {:?}", e), + })?; + + // Debug: Log the block_hash from public inputs to verify it's not all zeros + // Block hash is at indices 16-19 (4 field elements after asset_id, output_amount_1, output_amount_2, volume_fee_bps, nullifier[4], exit_1[4], exit_2[4]) + if proof.public_inputs.len() >= 20 { + let block_hash: Vec = proof.public_inputs[16..20] + .iter() + .map(|f| f.to_canonical_u64()) + .collect(); + let is_dummy = block_hash.iter().all(|&v| v == 0); + log::info!( + "[SDK Aggregator] Adding proof with block_hash={:?}, is_dummy={}", + block_hash, + is_dummy + ); + } + + let mut aggregator = self.inner.lock().map_err(|e| WormholeError { + message: format!("Failed to lock aggregator: {}", e), + })?; + + aggregator.push_proof(proof).map_err(|e| WormholeError { + message: format!("Failed to add proof: {}", e), + }) + } + + /// Aggregate all proofs in the buffer. + /// + /// If fewer than `batch_size` proofs have been added, the remaining + /// slots are filled with dummy proofs automatically. + /// + /// # Returns + /// The aggregated proof. 
+ pub fn aggregate(&self) -> Result { + let mut aggregator = self.inner.lock().map_err(|e| WormholeError { + message: format!("Failed to lock aggregator: {}", e), + })?; + + let num_real = aggregator + .proofs_buffer + .as_ref() + .map(|b| b.len()) + .unwrap_or(0); + + log::info!( + "[SDK Aggregator] Starting aggregation with {} real proofs, batch_size={}", + num_real, + self.batch_size + ); + + // Debug: Log block_hash of each proof in the buffer + if let Some(ref proofs) = aggregator.proofs_buffer { + for (i, proof) in proofs.iter().enumerate() { + if proof.public_inputs.len() >= 20 { + let block_hash: Vec = proof.public_inputs[16..20] + .iter() + .map(|f| f.to_canonical_u64()) + .collect(); + let is_dummy = block_hash.iter().all(|&v| v == 0); + log::info!( + "[SDK Aggregator] Proof {} in buffer: block_hash={:?}, is_dummy={}", + i, + block_hash, + is_dummy + ); + } + } + } + + let result = aggregator.aggregate().map_err(|e| WormholeError { + message: format!("Aggregation failed: {}", e), + })?; + + Ok(AggregatedProof { + proof_hex: format!("0x{}", hex::encode(result.proof.to_bytes())), + num_real_proofs: num_real, + }) + } + + /// Clear the proof buffer without aggregating. + pub fn clear(&self) -> Result<(), WormholeError> { + let mut aggregator = self.inner.lock().map_err(|e| WormholeError { + message: format!("Failed to lock aggregator: {}", e), + })?; + + aggregator.proofs_buffer = None; + Ok(()) + } +} + +// ============================================================================ +// FFI Factory Functions +// ============================================================================ + +/// Create a new proof generator. +/// +/// This loads ~171MB of circuit data, so it's expensive. Call once and reuse. +/// +/// # Arguments +/// * `bins_dir` - Path to directory containing prover.bin and common.bin +pub fn create_proof_generator(bins_dir: String) -> Result { + WormholeProofGenerator::new(bins_dir) +} + +/// Create a new proof aggregator. 
+/// +/// # Arguments +/// * `bins_dir` - Path to directory containing aggregator circuit files +pub fn create_proof_aggregator(bins_dir: String) -> Result { + WormholeProofAggregator::new(bins_dir) +} + +// ============================================================================ +// Helper Functions +// ============================================================================ + +/// Parse a hex string (with or without 0x prefix) into a 32-byte array. +fn parse_hex_32(hex_str: &str) -> Result<[u8; 32], WormholeError> { + let hex_str = hex_str.trim_start_matches("0x"); + let bytes = hex::decode(hex_str).map_err(|e| WormholeError { + message: format!("Invalid hex string: {}", e), + })?; + bytes.try_into().map_err(|_| WormholeError { + message: "Expected 32 bytes".to_string(), + }) +} + +/// Parse a hex string (with or without 0x prefix) into bytes. +fn parse_hex(hex_str: &str) -> Result, WormholeError> { + let hex_str = hex_str.trim_start_matches("0x"); + hex::decode(hex_str).map_err(|e| WormholeError { + message: format!("Invalid hex string: {}", e), + }) +} + +/// Convert an SS58 address to a 32-byte account ID. +fn ss58_to_bytes(ss58: &str) -> Result<[u8; 32], WormholeError> { + let account = AccountId32::from_ss58check(ss58).map_err(|e| WormholeError { + message: format!("Invalid SS58 address '{}': {:?}", ss58, e), + })?; + Ok(account.into()) +} + +/// Compute the transfer proof leaf hash for storage proof verification. +/// +/// Uses `hash_storage` to match the chain's PoseidonStorageHasher behavior, +/// which decodes the SCALE-encoded key and converts to felts via `ToFelts`. 
+fn compute_transfer_proof_leaf_hash( + asset_id: u32, + transfer_count: u64, + funding_account: &[u8; 32], + wormhole_address: &[u8; 32], + amount: u128, +) -> Result<[u8; 32], WormholeError> { + use codec::Encode; + + // TransferProofKey type on chain: (AssetId, TransferCount, AccountId, AccountId, Balance) + // AccountId is [u8; 32] internally, and ToFelts is implemented for [u8; 32] + type TransferProofKey = (u32, u64, [u8; 32], [u8; 32], u128); + + // SCALE encode the key tuple + let key: TransferProofKey = ( + asset_id, + transfer_count, + *funding_account, + *wormhole_address, + amount, + ); + let encoded = key.encode(); + + // Use hash_storage which decodes and converts to felts via ToFelts trait + // This matches how the chain's PoseidonStorageHasher works + let hash = qp_poseidon::PoseidonHasher::hash_storage::(&encoded); + + Ok(hash) +} + +/// Compute block hash from header components. +/// +/// This matches the Poseidon block hash computation used by the Quantus chain. +/// The hash is computed over the SCALE-encoded header components. +/// +/// # Arguments +/// * `parent_hash_hex` - Parent block hash (32 bytes, hex with 0x prefix) +/// * `state_root_hex` - State root (32 bytes, hex with 0x prefix) +/// * `extrinsics_root_hex` - Extrinsics root (32 bytes, hex with 0x prefix) +/// * `block_number` - Block number +/// * `digest_hex` - SCALE-encoded digest (hex with 0x prefix, from encode_digest_from_rpc_logs) +/// +/// # Returns +/// Block hash as hex string with 0x prefix. 
+#[flutter_rust_bridge::frb(sync)] +pub fn compute_block_hash( + parent_hash_hex: String, + state_root_hex: String, + extrinsics_root_hex: String, + block_number: u32, + digest_hex: String, +) -> Result { + let parent_hash = parse_hex_32(&parent_hash_hex)?; + let state_root = parse_hex_32(&state_root_hex)?; + let extrinsics_root = parse_hex_32(&extrinsics_root_hex)?; + let digest = parse_hex(&digest_hex)?; + + let hash = compute_block_hash_internal( + &parent_hash, + &state_root, + &extrinsics_root, + block_number, + &digest, + )?; + + Ok(format!("0x{}", hex::encode(hash))) +} + +/// Internal function to compute block hash from raw bytes. +/// Delegates to the circuit's HeaderInputs to guarantee hash consistency with ZK proofs. +fn compute_block_hash_internal( + parent_hash: &[u8; 32], + state_root: &[u8; 32], + extrinsics_root: &[u8; 32], + block_number: u32, + digest: &[u8], +) -> Result<[u8; 32], WormholeError> { + use qp_wormhole_circuit::block_header::header::{HeaderInputs, DIGEST_LOGS_SIZE}; + use qp_wormhole_inputs::BytesDigest; + + let digest_fixed: [u8; DIGEST_LOGS_SIZE] = digest.try_into().map_err(|_| WormholeError { + message: format!( + "Digest must be {} bytes, got {}", + DIGEST_LOGS_SIZE, + digest.len() + ), + })?; + + let header = HeaderInputs::new( + BytesDigest::try_from(*parent_hash).map_err(|e| WormholeError { + message: format!("Invalid parent hash: {:?}", e), + })?, + block_number, + BytesDigest::try_from(*state_root).map_err(|e| WormholeError { + message: format!("Invalid state root: {:?}", e), + })?, + BytesDigest::try_from(*extrinsics_root).map_err(|e| WormholeError { + message: format!("Invalid extrinsics root: {:?}", e), + })?, + &digest_fixed, + ) + .map_err(|e| WormholeError { + message: format!("Failed to create header inputs: {}", e), + })?; + + let block_hash = header.block_hash(); + let hash: [u8; 32] = block_hash + .as_ref() + .try_into() + .map_err(|_| WormholeError { + message: "Block hash conversion failed".to_string(), + })?; + 
Ok(hash) + +} + +// ============================================================================ +// Circuit Binary Generation +// ============================================================================ + +/// Result of circuit binary generation +#[flutter_rust_bridge::frb(sync)] +pub struct CircuitGenerationResult { + /// Whether generation succeeded + pub success: bool, + /// Error message if failed + pub error: Option, + /// Path to the generated binaries directory + pub output_dir: Option, +} + +/// Progress callback for circuit generation (phase name, progress 0.0-1.0) +pub type CircuitGenerationProgress = extern "C" fn(phase: *const i8, progress: f64); + +/// Generate circuit binary files for ZK proof generation. +/// +/// This is a long-running operation (10-30 minutes) that generates the +/// circuit binaries needed for wormhole withdrawal proofs. +/// +/// # Arguments +/// * `output_dir` - Directory to write the binaries to +/// * `num_leaf_proofs` - Number of leaf proofs per aggregation (typically 8) +/// +/// # Returns +/// A `CircuitGenerationResult` indicating success or failure. 
+/// +/// # Generated Files +/// - `prover.bin` - Prover circuit data (~163MB) +/// - `common.bin` - Common circuit data +/// - `verifier.bin` - Verifier circuit data +/// - `dummy_proof.bin` - Dummy proof for aggregation padding +/// - `aggregated_common.bin` - Aggregated circuit common data +/// - `aggregated_verifier.bin` - Aggregated circuit verifier data +/// - `config.json` - Configuration with hashes for integrity verification +pub fn generate_circuit_binaries( + output_dir: String, + num_leaf_proofs: u32, +) -> CircuitGenerationResult { + match qp_wormhole_circuit_builder::generate_all_circuit_binaries( + &output_dir, + true, // include_prover - we need it for proof generation + num_leaf_proofs as usize, + ) { + Ok(()) => CircuitGenerationResult { + success: true, + error: None, + output_dir: Some(output_dir), + }, + Err(e) => CircuitGenerationResult { + success: false, + error: Some(e.to_string()), + output_dir: None, + }, + } +} + +/// Check if circuit binaries exist and are valid in a directory. +/// +/// # Arguments +/// * `bins_dir` - Directory containing the circuit binaries +/// +/// # Returns +/// True if all required files exist, false otherwise. 
+#[flutter_rust_bridge::frb(sync)] +pub fn check_circuit_binaries_exist(bins_dir: String) -> bool { + use std::path::Path; + + let required_files = [ + "prover.bin", + "common.bin", + "verifier.bin", + "dummy_proof.bin", + "aggregated_common.bin", + "aggregated_verifier.bin", + "config.json", + ]; + + let path = Path::new(&bins_dir); + if !path.exists() { + return false; + } + + for file in &required_files { + if !path.join(file).exists() { + return false; + } + } + + true +} + +#[cfg(test)] +mod tests { + use super::*; + + const TEST_MNEMONIC: &str = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon art"; + + #[test] + fn test_derive_wormhole_pair() { + let result = derive_wormhole_pair(TEST_MNEMONIC.to_string(), 1, 0).unwrap(); + + // Verify the result has the expected format + assert!(result.address.starts_with("q") || result.address.starts_with("5")); + assert!(result.address_hex.starts_with("0x")); + assert_eq!(result.address_hex.len(), 66); // 0x + 64 hex chars + assert!(result.first_hash_ss58.starts_with("q") || result.first_hash_ss58.starts_with("5")); + assert!(result.first_hash_hex.starts_with("0x")); + assert_eq!(result.first_hash_hex.len(), 66); + assert!(result.secret_hex.starts_with("0x")); + assert_eq!(result.secret_hex.len(), 66); + } + + #[test] + fn test_derive_deterministic() { + // Same mnemonic + path should always produce the same result + let result1 = derive_wormhole_pair(TEST_MNEMONIC.to_string(), 1, 0).unwrap(); + let result2 = derive_wormhole_pair(TEST_MNEMONIC.to_string(), 1, 0).unwrap(); + + assert_eq!(result1.address, result2.address); + assert_eq!(result1.first_hash_hex, result2.first_hash_hex); + assert_eq!(result1.secret_hex, result2.secret_hex); + } + + #[test] + fn test_different_indices_produce_different_addresses() { + let result0 = derive_wormhole_pair(TEST_MNEMONIC.to_string(), 1, 0).unwrap(); 
+ let result1 = derive_wormhole_pair(TEST_MNEMONIC.to_string(), 1, 1).unwrap(); + + assert_ne!(result0.address, result1.address); + assert_ne!(result0.secret_hex, result1.secret_hex); + } + + #[test] + fn test_different_purposes_produce_different_addresses() { + let result_miner = derive_wormhole_pair(TEST_MNEMONIC.to_string(), 1, 0).unwrap(); + let result_mobile = derive_wormhole_pair(TEST_MNEMONIC.to_string(), 0, 0).unwrap(); + + assert_ne!(result_miner.address, result_mobile.address); + } + + #[test] + fn test_get_wormhole_derivation_path() { + let path = get_wormhole_derivation_path(1, 5); + assert!(path.contains("189189189'")); + assert!(path.contains("/1'/")); + assert!(path.contains("/5'")); + } + + #[test] + fn test_compute_nullifier_deterministic() { + let secret = "0x0101010101010101010101010101010101010101010101010101010101010101"; + let n1 = compute_nullifier(secret.to_string(), 42).unwrap(); + let n2 = compute_nullifier(secret.to_string(), 42).unwrap(); + assert_eq!(n1, n2); + } + + #[test] + fn test_compute_nullifier_different_transfer_count() { + let secret = "0x0101010101010101010101010101010101010101010101010101010101010101"; + let n1 = compute_nullifier(secret.to_string(), 1).unwrap(); + let n2 = compute_nullifier(secret.to_string(), 2).unwrap(); + assert_ne!(n1, n2); + } + + #[test] + fn test_quantize_amount() { + // 1 QTN = 1_000_000_000_000 planck (12 decimals) + // Quantized = 100 (2 decimals) + let planck = 1_000_000_000_000u64; + let quantized = quantize_amount(planck).unwrap(); + assert_eq!(quantized, 100); + + // Round trip + let dequantized = dequantize_amount(quantized); + assert_eq!(dequantized, planck); + } + + #[test] + fn test_derive_address_from_secret() { + // Derive a pair and verify the address matches + let pair = derive_wormhole_pair(TEST_MNEMONIC.to_string(), 1, 0).unwrap(); + let derived_addr = derive_address_from_secret(pair.secret_hex.clone()).unwrap(); + assert_eq!(derived_addr, pair.address); + } + + #[test] + fn 
test_block_hash_sdk_matches_circuit() { + use qp_wormhole_circuit::block_header::header::HeaderInputs; + use qp_wormhole_inputs::BytesDigest; + + let parent_hash = [0u8; 32]; + let block_number: u32 = 1; + let state_root: [u8; 32] = hex::decode( + "713c0468ddc5b657ce758a3fb75ec5ae906d95b334f24a4f5661cc775e1cdb43", + ) + .unwrap() + .try_into() + .unwrap(); + let extrinsics_root = [0u8; 32]; + #[rustfmt::skip] + let digest: [u8; 110] = [ + 8, 6, 112, 111, 119, 95, 128, 233, 182, 183, 107, 158, 1, 115, 19, 219, + 126, 253, 86, 30, 208, 176, 70, 21, 45, 180, 229, 9, 62, 91, 4, 6, 53, + 245, 52, 48, 38, 123, 225, 5, 112, 111, 119, 95, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 18, 79, 226, + ]; + #[rustfmt::skip] + let expected: [u8; 32] = [ + 160, 247, 232, 22, 150, 117, 245, 140, 3, 70, 175, 175, 22, 247, 90, 37, + 231, 80, 170, 11, 27, 183, 40, 51, 5, 19, 164, 19, 188, 192, 229, 212, + ]; + + let sdk_hash = + compute_block_hash_internal(&parent_hash, &state_root, &extrinsics_root, block_number, &digest) + .expect("SDK block hash computation failed"); + + let circuit_hash = HeaderInputs::new( + BytesDigest::try_from(parent_hash).unwrap(), + block_number, + BytesDigest::try_from(state_root).unwrap(), + BytesDigest::try_from(extrinsics_root).unwrap(), + &digest, + ) + .unwrap() + .block_hash(); + + assert_eq!( + circuit_hash.as_ref(), + &expected, + "Circuit hash sanity check against known fixture" + ); + assert_eq!( + sdk_hash, expected, + "SDK hash must match the circuit's known block hash" + ); + } + + #[test] + fn test_block_hash_sdk_matches_circuit_nonzero_inputs() { + use qp_wormhole_circuit::block_header::header::HeaderInputs; + use qp_wormhole_inputs::BytesDigest; + + #[rustfmt::skip] + let parent_hash: [u8; 32] = [ + 160, 247, 232, 22, 150, 117, 245, 140, 3, 70, 175, 175, 22, 247, 90, 37, + 231, 80, 170, 11, 
27, 183, 40, 51, 5, 19, 164, 19, 188, 192, 229, 212, + ]; + let block_number: u32 = 2; + let state_root: [u8; 32] = hex::decode( + "2f10a7c86fdd3758d1174e955a5f6efbbef29b41850720853ee4843a2a0d48a7", + ) + .unwrap() + .try_into() + .unwrap(); + let extrinsics_root = [0u8; 32]; + #[rustfmt::skip] + let digest: [u8; 110] = [ + 8, 6, 112, 111, 119, 95, 128, 233, 182, 183, 107, 158, 1, 115, 19, 219, + 126, 253, 86, 30, 208, 176, 70, 21, 45, 180, 229, 9, 62, 91, 4, 6, 53, + 245, 52, 48, 38, 123, 225, 5, 112, 111, 119, 95, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 18, 79, 226, + ]; + #[rustfmt::skip] + let expected: [u8; 32] = [ + 123, 3, 1, 4, 129, 152, 164, 69, 52, 213, 96, 85, 78, 201, 4, 176, 26, + 84, 254, 144, 212, 78, 187, 6, 221, 141, 198, 216, 24, 52, 122, 31, + ]; + + let sdk_hash = + compute_block_hash_internal(&parent_hash, &state_root, &extrinsics_root, block_number, &digest) + .expect("SDK block hash computation failed"); + + let circuit_hash = HeaderInputs::new( + BytesDigest::try_from(parent_hash).unwrap(), + block_number, + BytesDigest::try_from(state_root).unwrap(), + BytesDigest::try_from(extrinsics_root).unwrap(), + &digest, + ) + .unwrap() + .block_hash(); + + assert_eq!( + circuit_hash.as_ref(), + &expected, + "Circuit hash sanity check against known fixture" + ); + assert_eq!( + sdk_hash, expected, + "SDK hash must match the circuit's known block hash" + ); + } +} diff --git a/quantus_sdk/rust/src/frb_generated.rs b/quantus_sdk/rust/src/frb_generated.rs index 71038b4c..0f257f27 100644 --- a/quantus_sdk/rust/src/frb_generated.rs +++ b/quantus_sdk/rust/src/frb_generated.rs @@ -26,6 +26,7 @@ // Section: imports use crate::api::crypto::*; +use crate::api::wormhole::*; use flutter_rust_bridge::for_generated::byteorder::{NativeEndian, ReadBytesExt, WriteBytesExt}; use 
flutter_rust_bridge::for_generated::{transform_result_dco, Lifetimeable, Lockable}; use flutter_rust_bridge::{Handler, IntoIntoDart}; @@ -38,7 +39,7 @@ flutter_rust_bridge::frb_generated_boilerplate!( default_rust_auto_opaque = RustAutoOpaqueMoi, ); pub(crate) const FLUTTER_RUST_BRIDGE_CODEGEN_VERSION: &str = "2.11.1"; -pub(crate) const FLUTTER_RUST_BRIDGE_CODEGEN_CONTENT_HASH: i32 = 1692591137; +pub(crate) const FLUTTER_RUST_BRIDGE_CODEGEN_CONTENT_HASH: i32 = 1665864519; // Section: executor @@ -46,16 +47,17 @@ flutter_rust_bridge::frb_generated_default_handler!(); // Section: wire_funcs -fn wire__crate__api__crypto__crystal_alice_impl( +fn wire__crate__api__wormhole__WormholeProofAggregator_add_proof_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, -) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { - FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "crystal_alice", - port: None, - mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + debug_name: "WormholeProofAggregator_add_proof", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, }, move || { let message = unsafe { @@ -67,24 +69,48 @@ fn wire__crate__api__crypto__crystal_alice_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_that = , + >>::sse_decode(&mut deserializer); + let api_proof_hex = ::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok(crate::api::crypto::crystal_alice())?; - Ok(output_ok) - })()) + move |context| { + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let mut api_that_guard = None; + let decode_indices_ = + 
flutter_rust_bridge::for_generated::lockable_compute_decode_order(vec![ + flutter_rust_bridge::for_generated::LockableOrderInfo::new( + &api_that, 0, false, + ), + ]); + for i in decode_indices_ { + match i { + 0 => api_that_guard = Some(api_that.lockable_decode_sync_ref()), + _ => unreachable!(), + } + } + let api_that_guard = api_that_guard.unwrap(); + let output_ok = crate::api::wormhole::WormholeProofAggregator::add_proof( + &*api_that_guard, + api_proof_hex, + )?; + Ok(output_ok) + })()) + } }, ) } -fn wire__crate__api__crypto__crystal_bob_impl( +fn wire__crate__api__wormhole__WormholeProofAggregator_aggregate_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, -) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { - FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "crystal_bob", - port: None, - mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + debug_name: "WormholeProofAggregator_aggregate", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, }, move || { let message = unsafe { @@ -96,24 +122,45 @@ fn wire__crate__api__crypto__crystal_bob_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_that = , + >>::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok(crate::api::crypto::crystal_bob())?; - Ok(output_ok) - })()) + move |context| { + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let mut api_that_guard = None; + let decode_indices_ = + flutter_rust_bridge::for_generated::lockable_compute_decode_order(vec![ + flutter_rust_bridge::for_generated::LockableOrderInfo::new( + &api_that, 0, false, + ), + ]); + for i in 
decode_indices_ { + match i { + 0 => api_that_guard = Some(api_that.lockable_decode_sync_ref()), + _ => unreachable!(), + } + } + let api_that_guard = api_that_guard.unwrap(); + let output_ok = + crate::api::wormhole::WormholeProofAggregator::aggregate(&*api_that_guard)?; + Ok(output_ok) + })()) + } }, ) } -fn wire__crate__api__crypto__crystal_charlie_impl( +fn wire__crate__api__wormhole__WormholeProofAggregator_batch_size_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, -) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { - FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "crystal_charlie", - port: None, - mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + debug_name: "WormholeProofAggregator_batch_size", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, }, move || { let message = unsafe { @@ -125,24 +172,46 @@ fn wire__crate__api__crypto__crystal_charlie_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_that = , + >>::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok(crate::api::crypto::crystal_charlie())?; - Ok(output_ok) - })()) + move |context| { + transform_result_sse::<_, ()>((move || { + let mut api_that_guard = None; + let decode_indices_ = + flutter_rust_bridge::for_generated::lockable_compute_decode_order(vec![ + flutter_rust_bridge::for_generated::LockableOrderInfo::new( + &api_that, 0, false, + ), + ]); + for i in decode_indices_ { + match i { + 0 => api_that_guard = Some(api_that.lockable_decode_sync_ref()), + _ => unreachable!(), + } + } + let api_that_guard = api_that_guard.unwrap(); + let output_ok = Result::<_, ()>::Ok( + 
crate::api::wormhole::WormholeProofAggregator::batch_size(&*api_that_guard), + )?; + Ok(output_ok) + })()) + } }, ) } -fn wire__crate__api__ur__decode_ur_impl( +fn wire__crate__api__wormhole__WormholeProofAggregator_clear_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, -) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { - FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "decode_ur", - port: None, - mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + debug_name: "WormholeProofAggregator_clear", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, }, move || { let message = unsafe { @@ -154,23 +223,127 @@ fn wire__crate__api__ur__decode_ur_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_ur_parts = >::sse_decode(&mut deserializer); + let api_that = , + >>::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, String>((move || { - let output_ok = crate::api::ur::decode_ur(api_ur_parts)?; - Ok(output_ok) - })()) + move |context| { + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let mut api_that_guard = None; + let decode_indices_ = + flutter_rust_bridge::for_generated::lockable_compute_decode_order(vec![ + flutter_rust_bridge::for_generated::LockableOrderInfo::new( + &api_that, 0, false, + ), + ]); + for i in decode_indices_ { + match i { + 0 => api_that_guard = Some(api_that.lockable_decode_sync_ref()), + _ => unreachable!(), + } + } + let api_that_guard = api_that_guard.unwrap(); + let output_ok = + crate::api::wormhole::WormholeProofAggregator::clear(&*api_that_guard)?; + Ok(output_ok) + })()) + } }, ) } -fn wire__crate__api__crypto__derive_hd_path_impl( +fn 
wire__crate__api__wormhole__WormholeProofAggregator_new_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "WormholeProofAggregator_new", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_bins_dir = ::sse_decode(&mut deserializer); + deserializer.end(); + move |context| { + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = + crate::api::wormhole::WormholeProofAggregator::new(api_bins_dir)?; + Ok(output_ok) + })()) + } + }, + ) +} +fn wire__crate__api__wormhole__WormholeProofAggregator_proof_count_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "WormholeProofAggregator_proof_count", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_that = , + >>::sse_decode(&mut deserializer); + deserializer.end(); + move |context| { + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let mut api_that_guard = None; + let decode_indices_ = + 
flutter_rust_bridge::for_generated::lockable_compute_decode_order(vec![ + flutter_rust_bridge::for_generated::LockableOrderInfo::new( + &api_that, 0, false, + ), + ]); + for i in decode_indices_ { + match i { + 0 => api_that_guard = Some(api_that.lockable_decode_sync_ref()), + _ => unreachable!(), + } + } + let api_that_guard = api_that_guard.unwrap(); + let output_ok = crate::api::wormhole::WormholeProofAggregator::proof_count( + &*api_that_guard, + )?; + Ok(output_ok) + })()) + } + }, + ) +} +fn wire__crate__api__wormhole__check_circuit_binaries_exist_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "derive_hd_path", + debug_name: "check_circuit_binaries_exist", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -184,25 +357,58 @@ fn wire__crate__api__crypto__derive_hd_path_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_seed = >::sse_decode(&mut deserializer); - let api_path = ::sse_decode(&mut deserializer); + let api_bins_dir = ::sse_decode(&mut deserializer); deserializer.end(); transform_result_sse::<_, ()>((move || { - let output_ok = - Result::<_, ()>::Ok(crate::api::crypto::derive_hd_path(api_seed, api_path))?; + let output_ok = Result::<_, ()>::Ok( + crate::api::wormhole::check_circuit_binaries_exist(api_bins_dir), + )?; Ok(output_ok) })()) }, ) } -fn wire__crate__api__ur__encode_ur_impl( +fn wire__crate__api__wormhole__circuit_config_load_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: 
"circuit_config_load", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_bins_dir = ::sse_decode(&mut deserializer); + deserializer.end(); + move |context| { + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = crate::api::wormhole::CircuitConfig::load(&api_bins_dir)?; + Ok(output_ok) + })()) + } + }, + ) +} +fn wire__crate__api__wormhole__compute_block_hash_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "encode_ur", + debug_name: "compute_block_hash", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -216,23 +422,33 @@ fn wire__crate__api__ur__encode_ur_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_data = >::sse_decode(&mut deserializer); + let api_parent_hash_hex = ::sse_decode(&mut deserializer); + let api_state_root_hex = ::sse_decode(&mut deserializer); + let api_extrinsics_root_hex = ::sse_decode(&mut deserializer); + let api_block_number = ::sse_decode(&mut deserializer); + let api_digest_hex = ::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, String>((move || { - let output_ok = crate::api::ur::encode_ur(api_data)?; + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = crate::api::wormhole::compute_block_hash( + api_parent_hash_hex, + api_state_root_hex, + api_extrinsics_root_hex, + api_block_number, + api_digest_hex, + )?; 
Ok(output_ok) })()) }, ) } -fn wire__crate__api__crypto__generate_derived_keypair_impl( +fn wire__crate__api__wormhole__compute_nullifier_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "generate_derived_keypair", + debug_name: "compute_nullifier", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -246,25 +462,25 @@ fn wire__crate__api__crypto__generate_derived_keypair_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_mnemonic_str = ::sse_decode(&mut deserializer); - let api_path = ::sse_decode(&mut deserializer); + let api_secret_hex = ::sse_decode(&mut deserializer); + let api_transfer_count = ::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, HDLatticeError>((move || { + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { let output_ok = - crate::api::crypto::generate_derived_keypair(api_mnemonic_str, &api_path)?; + crate::api::wormhole::compute_nullifier(api_secret_hex, api_transfer_count)?; Ok(output_ok) })()) }, ) } -fn wire__crate__api__crypto__generate_keypair_impl( +fn wire__crate__api__wormhole__compute_output_amount_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "generate_keypair", + debug_name: "compute_output_amount", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -278,24 +494,27 @@ fn wire__crate__api__crypto__generate_keypair_impl( }; let mut deserializer = 
flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_mnemonic_str = ::sse_decode(&mut deserializer); + let api_input_amount = ::sse_decode(&mut deserializer); + let api_fee_bps = ::sse_decode(&mut deserializer); deserializer.end(); transform_result_sse::<_, ()>((move || { - let output_ok = - Result::<_, ()>::Ok(crate::api::crypto::generate_keypair(api_mnemonic_str))?; + let output_ok = Result::<_, ()>::Ok(crate::api::wormhole::compute_output_amount( + api_input_amount, + api_fee_bps, + ))?; Ok(output_ok) })()) }, ) } -fn wire__crate__api__crypto__generate_keypair_from_seed_impl( +fn wire__crate__api__wormhole__compute_transfer_proof_storage_key_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "generate_keypair_from_seed", + debug_name: "compute_transfer_proof_storage_key", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -309,17 +528,24 @@ fn wire__crate__api__crypto__generate_keypair_from_seed_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_seed = >::sse_decode(&mut deserializer); + let api_secret_hex = ::sse_decode(&mut deserializer); + let api_transfer_count = ::sse_decode(&mut deserializer); + let api_funding_account = ::sse_decode(&mut deserializer); + let api_amount = ::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, ()>((move || { - let output_ok = - Result::<_, ()>::Ok(crate::api::crypto::generate_keypair_from_seed(api_seed))?; + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = crate::api::wormhole::compute_transfer_proof_storage_key( + api_secret_hex, + api_transfer_count, + api_funding_account, + api_amount, + )?; Ok(output_ok) })()) }, ) } 
-fn wire__crate__api__crypto__init_app_impl( +fn wire__crate__api__wormhole__create_proof_aggregator_impl( port_: flutter_rust_bridge::for_generated::MessagePort, ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, @@ -327,7 +553,7 @@ fn wire__crate__api__crypto__init_app_impl( ) { FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "init_app", + debug_name: "create_proof_aggregator", port: Some(port_), mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, }, @@ -341,28 +567,28 @@ fn wire__crate__api__crypto__init_app_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_bins_dir = ::sse_decode(&mut deserializer); deserializer.end(); move |context| { - transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok({ - crate::api::crypto::init_app(); - })?; + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = crate::api::wormhole::create_proof_aggregator(api_bins_dir)?; Ok(output_ok) })()) } }, ) } -fn wire__crate__api__ur__is_complete_ur_impl( +fn wire__crate__api__wormhole__create_proof_generator_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, -) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { - FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "is_complete_ur", - port: None, - mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + debug_name: "create_proof_generator", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, }, move || { let message = unsafe { @@ -374,23 +600,25 @@ fn wire__crate__api__ur__is_complete_ur_impl( }; let mut deserializer = 
flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_ur_parts = >::sse_decode(&mut deserializer); + let api_bins_dir = ::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok(crate::api::ur::is_complete_ur(api_ur_parts))?; - Ok(output_ok) - })()) + move |context| { + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = crate::api::wormhole::create_proof_generator(api_bins_dir)?; + Ok(output_ok) + })()) + } }, ) } -fn wire__crate__api__crypto__public_key_bytes_impl( +fn wire__crate__api__crypto__crystal_alice_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "public_key_bytes", + debug_name: "crystal_alice", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -406,20 +634,20 @@ fn wire__crate__api__crypto__public_key_bytes_impl( flutter_rust_bridge::for_generated::SseDeserializer::new(message); deserializer.end(); transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok(crate::api::crypto::public_key_bytes())?; + let output_ok = Result::<_, ()>::Ok(crate::api::crypto::crystal_alice())?; Ok(output_ok) })()) }, ) } -fn wire__crate__api__crypto__secret_key_bytes_impl( +fn wire__crate__api__crypto__crystal_bob_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "secret_key_bytes", + debug_name: "crystal_bob", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -435,20 +663,20 @@ fn 
wire__crate__api__crypto__secret_key_bytes_impl( flutter_rust_bridge::for_generated::SseDeserializer::new(message); deserializer.end(); transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok(crate::api::crypto::secret_key_bytes())?; + let output_ok = Result::<_, ()>::Ok(crate::api::crypto::crystal_bob())?; Ok(output_ok) })()) }, ) } -fn wire__crate__api__crypto__set_default_ss58_prefix_impl( +fn wire__crate__api__crypto__crystal_charlie_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "set_default_ss58_prefix", + debug_name: "crystal_charlie", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -462,25 +690,22 @@ fn wire__crate__api__crypto__set_default_ss58_prefix_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_prefix = ::sse_decode(&mut deserializer); deserializer.end(); transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok({ - crate::api::crypto::set_default_ss58_prefix(api_prefix); - })?; + let output_ok = Result::<_, ()>::Ok(crate::api::crypto::crystal_charlie())?; Ok(output_ok) })()) }, ) } -fn wire__crate__api__crypto__sign_message_impl( +fn wire__crate__api__ur__decode_ur_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "sign_message", + debug_name: "decode_ur", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -494,29 +719,23 @@ fn wire__crate__api__crypto__sign_message_impl( }; let mut deserializer = 
flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_keypair = ::sse_decode(&mut deserializer); - let api_message = >::sse_decode(&mut deserializer); - let api_entropy = >::sse_decode(&mut deserializer); + let api_ur_parts = >::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok(crate::api::crypto::sign_message( - &api_keypair, - &api_message, - api_entropy, - ))?; + transform_result_sse::<_, String>((move || { + let output_ok = crate::api::ur::decode_ur(api_ur_parts)?; Ok(output_ok) })()) }, ) } -fn wire__crate__api__crypto__sign_message_with_pubkey_impl( +fn wire__crate__api__wormhole__dequantize_amount_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "sign_message_with_pubkey", + debug_name: "dequantize_amount", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -530,29 +749,25 @@ fn wire__crate__api__crypto__sign_message_with_pubkey_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_keypair = ::sse_decode(&mut deserializer); - let api_message = >::sse_decode(&mut deserializer); - let api_entropy = >::sse_decode(&mut deserializer); + let api_quantized_amount = ::sse_decode(&mut deserializer); deserializer.end(); transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok(crate::api::crypto::sign_message_with_pubkey( - &api_keypair, - &api_message, - api_entropy, + let output_ok = Result::<_, ()>::Ok(crate::api::wormhole::dequantize_amount( + api_quantized_amount, ))?; Ok(output_ok) })()) }, ) } -fn wire__crate__api__crypto__signature_bytes_impl( +fn wire__crate__api__wormhole__derive_address_from_secret_impl( ptr_: 
flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "signature_bytes", + debug_name: "derive_address_from_secret", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -566,22 +781,23 @@ fn wire__crate__api__crypto__signature_bytes_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_secret_hex = ::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok(crate::api::crypto::signature_bytes())?; + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = crate::api::wormhole::derive_address_from_secret(api_secret_hex)?; Ok(output_ok) })()) }, ) } -fn wire__crate__api__crypto__ss58_to_account_id_impl( +fn wire__crate__api__crypto__derive_hd_path_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "ss58_to_account_id", + debug_name: "derive_hd_path", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -595,24 +811,25 @@ fn wire__crate__api__crypto__ss58_to_account_id_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_s = ::sse_decode(&mut deserializer); + let api_seed = >::sse_decode(&mut deserializer); + let api_path = ::sse_decode(&mut deserializer); deserializer.end(); transform_result_sse::<_, ()>((move || { let output_ok = - Result::<_, ()>::Ok(crate::api::crypto::ss58_to_account_id(&api_s))?; + Result::<_, 
()>::Ok(crate::api::crypto::derive_hd_path(api_seed, api_path))?; Ok(output_ok) })()) }, ) } -fn wire__crate__api__crypto__to_account_id_impl( +fn wire__crate__api__wormhole__derive_wormhole_pair_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "to_account_id", + debug_name: "derive_wormhole_pair", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -626,23 +843,29 @@ fn wire__crate__api__crypto__to_account_id_impl( }; let mut deserializer = flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_obj = ::sse_decode(&mut deserializer); + let api_mnemonic = ::sse_decode(&mut deserializer); + let api_purpose = ::sse_decode(&mut deserializer); + let api_index = ::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok(crate::api::crypto::to_account_id(&api_obj))?; + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = crate::api::wormhole::derive_wormhole_pair( + api_mnemonic, + api_purpose, + api_index, + )?; Ok(output_ok) })()) }, ) } -fn wire__crate__api__crypto__verify_message_impl( +fn wire__crate__api__wormhole__encode_digest_from_rpc_logs_impl( ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, rust_vec_len_: i32, data_len_: i32, ) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( flutter_rust_bridge::for_generated::TaskInfo { - debug_name: "verify_message", + debug_name: "encode_digest_from_rpc_logs", port: None, mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, }, @@ -656,230 +879,1536 @@ fn wire__crate__api__crypto__verify_message_impl( }; let mut deserializer = 
flutter_rust_bridge::for_generated::SseDeserializer::new(message); - let api_keypair = ::sse_decode(&mut deserializer); - let api_message = >::sse_decode(&mut deserializer); - let api_signature = >::sse_decode(&mut deserializer); + let api_logs_hex = >::sse_decode(&mut deserializer); deserializer.end(); - transform_result_sse::<_, ()>((move || { - let output_ok = Result::<_, ()>::Ok(crate::api::crypto::verify_message( - &api_keypair, - &api_message, - &api_signature, - ))?; + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = crate::api::wormhole::encode_digest_from_rpc_logs(api_logs_hex)?; Ok(output_ok) })()) }, ) } - -// Section: related_funcs - -flutter_rust_bridge::frb_generated_moi_arc_impl_value!( - flutter_rust_bridge::for_generated::RustAutoOpaqueInner -); - -// Section: dart2rust - -impl SseDecode for HDLatticeError { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - let mut inner = , - >>::sse_decode(deserializer); - return flutter_rust_bridge::for_generated::rust_auto_opaque_decode_owned(inner); - } +fn wire__crate__api__ur__encode_ur_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "encode_ur", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_data = >::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, String>((move || { + let output_ok = 
crate::api::ur::encode_ur(api_data)?; + Ok(output_ok) + })()) + }, + ) } - -impl SseDecode - for RustOpaqueMoi> -{ - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - let mut inner = ::sse_decode(deserializer); - return decode_rust_opaque_moi(inner); - } +fn wire__crate__api__wormhole__first_hash_to_address_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "first_hash_to_address", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_first_hash_hex = ::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = crate::api::wormhole::first_hash_to_address(api_first_hash_hex)?; + Ok(output_ok) + })()) + }, + ) } - -impl SseDecode for String { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - let mut inner = >::sse_decode(deserializer); - return String::from_utf8(inner).unwrap(); - } +fn wire__crate__api__wormhole__generate_circuit_binaries_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: 
"generate_circuit_binaries", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_output_dir = ::sse_decode(&mut deserializer); + let api_num_leaf_proofs = ::sse_decode(&mut deserializer); + deserializer.end(); + move |context| { + transform_result_sse::<_, ()>((move || { + let output_ok = + Result::<_, ()>::Ok(crate::api::wormhole::generate_circuit_binaries( + api_output_dir, + api_num_leaf_proofs, + ))?; + Ok(output_ok) + })()) + } + }, + ) } - -impl SseDecode for bool { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - deserializer.cursor.read_u8().unwrap() != 0 - } +fn wire__crate__api__crypto__generate_derived_keypair_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "generate_derived_keypair", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_mnemonic_str = ::sse_decode(&mut deserializer); + let api_path = ::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, HDLatticeError>((move || { + let output_ok = + crate::api::crypto::generate_derived_keypair(api_mnemonic_str, &api_path)?; + Ok(output_ok) + })()) 
+ }, + ) } - -impl SseDecode for crate::api::crypto::Keypair { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - let mut var_publicKey = >::sse_decode(deserializer); - let mut var_secretKey = >::sse_decode(deserializer); - return crate::api::crypto::Keypair { - public_key: var_publicKey, - secret_key: var_secretKey, - }; +fn wire__crate__api__crypto__generate_keypair_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "generate_keypair", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_mnemonic_str = ::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = + Result::<_, ()>::Ok(crate::api::crypto::generate_keypair(api_mnemonic_str))?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__crypto__generate_keypair_from_seed_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "generate_keypair_from_seed", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) 
+ }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_seed = >::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = + Result::<_, ()>::Ok(crate::api::crypto::generate_keypair_from_seed(api_seed))?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__wormhole__get_aggregation_batch_size_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "get_aggregation_batch_size", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_bins_dir = ::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = crate::api::wormhole::get_aggregation_batch_size(api_bins_dir)?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__wormhole__get_wormhole_derivation_path_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "get_wormhole_derivation_path", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + 
flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_purpose = ::sse_decode(&mut deserializer); + let api_index = ::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, ()>::Ok( + crate::api::wormhole::get_wormhole_derivation_path(api_purpose, api_index), + )?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__crypto__init_app_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "init_app", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + deserializer.end(); + move |context| { + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, ()>::Ok({ + crate::api::crypto::init_app(); + })?; + Ok(output_ok) + })()) + } + }, + ) +} +fn wire__crate__api__ur__is_complete_ur_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "is_complete_ur", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_ur_parts = 
>::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, ()>::Ok(crate::api::ur::is_complete_ur(api_ur_parts))?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__crypto__public_key_bytes_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "public_key_bytes", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, ()>::Ok(crate::api::crypto::public_key_bytes())?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__wormhole__quantize_amount_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "quantize_amount", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_amount_planck = ::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = 
crate::api::wormhole::quantize_amount(api_amount_planck)?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__crypto__secret_key_bytes_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "secret_key_bytes", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, ()>::Ok(crate::api::crypto::secret_key_bytes())?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__crypto__set_default_ss58_prefix_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "set_default_ss58_prefix", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_prefix = ::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, ()>::Ok({ + crate::api::crypto::set_default_ss58_prefix(api_prefix); + })?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__crypto__sign_message_impl( + ptr_: 
flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "sign_message", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_keypair = ::sse_decode(&mut deserializer); + let api_message = >::sse_decode(&mut deserializer); + let api_entropy = >::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, ()>::Ok(crate::api::crypto::sign_message( + &api_keypair, + &api_message, + api_entropy, + ))?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__crypto__sign_message_with_pubkey_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "sign_message_with_pubkey", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_keypair = ::sse_decode(&mut deserializer); + let api_message = >::sse_decode(&mut deserializer); + let api_entropy = >::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, 
()>::Ok(crate::api::crypto::sign_message_with_pubkey( + &api_keypair, + &api_message, + api_entropy, + ))?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__crypto__signature_bytes_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "signature_bytes", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, ()>::Ok(crate::api::crypto::signature_bytes())?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__crypto__ss58_to_account_id_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "ss58_to_account_id", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_s = ::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = + Result::<_, ()>::Ok(crate::api::crypto::ss58_to_account_id(&api_s))?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__crypto__to_account_id_impl( + 
ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "to_account_id", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_obj = ::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, ()>::Ok(crate::api::crypto::to_account_id(&api_obj))?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__crypto__verify_message_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "verify_message", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_keypair = ::sse_decode(&mut deserializer); + let api_message = >::sse_decode(&mut deserializer); + let api_signature = >::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, ()>::Ok(crate::api::crypto::verify_message( + &api_keypair, + &api_message, + &api_signature, + ))?; + Ok(output_ok) + })()) + }, + ) +} +fn 
wire__crate__api__wormhole__wormhole_error_to_display_string_impl( + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_sync::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "wormhole_error_to_display_string(dart_style=toString)", + port: None, + mode: flutter_rust_bridge::for_generated::FfiCallMode::Sync, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_that = ::sse_decode(&mut deserializer); + deserializer.end(); + transform_result_sse::<_, ()>((move || { + let output_ok = Result::<_, ()>::Ok( + crate::api::wormhole::WormholeError::to_display_string(&api_that), + )?; + Ok(output_ok) + })()) + }, + ) +} +fn wire__crate__api__wormhole__wormhole_proof_generator_generate_proof_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "wormhole_proof_generator_generate_proof", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_that = + ::sse_decode(&mut deserializer); + let api_utxo = ::sse_decode(&mut deserializer); + let api_output = + ::sse_decode(&mut deserializer); + let api_fee_bps = ::sse_decode(&mut deserializer); + let api_block_header = + 
::sse_decode(&mut deserializer); + let api_storage_proof = + ::sse_decode(&mut deserializer); + deserializer.end(); + move |context| { + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = crate::api::wormhole::WormholeProofGenerator::generate_proof( + &api_that, + api_utxo, + api_output, + api_fee_bps, + api_block_header, + api_storage_proof, + )?; + Ok(output_ok) + })()) + } + }, + ) +} +fn wire__crate__api__wormhole__wormhole_proof_generator_new_impl( + port_: flutter_rust_bridge::for_generated::MessagePort, + ptr_: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len_: i32, + data_len_: i32, +) { + FLUTTER_RUST_BRIDGE_HANDLER.wrap_normal::( + flutter_rust_bridge::for_generated::TaskInfo { + debug_name: "wormhole_proof_generator_new", + port: Some(port_), + mode: flutter_rust_bridge::for_generated::FfiCallMode::Normal, + }, + move || { + let message = unsafe { + flutter_rust_bridge::for_generated::Dart2RustMessageSse::from_wire( + ptr_, + rust_vec_len_, + data_len_, + ) + }; + let mut deserializer = + flutter_rust_bridge::for_generated::SseDeserializer::new(message); + let api_bins_dir = ::sse_decode(&mut deserializer); + deserializer.end(); + move |context| { + transform_result_sse::<_, crate::api::wormhole::WormholeError>((move || { + let output_ok = + crate::api::wormhole::WormholeProofGenerator::new(api_bins_dir)?; + Ok(output_ok) + })()) + } + }, + ) +} + +// Section: related_funcs + +flutter_rust_bridge::frb_generated_moi_arc_impl_value!( + flutter_rust_bridge::for_generated::RustAutoOpaqueInner +); +flutter_rust_bridge::frb_generated_moi_arc_impl_value!( + flutter_rust_bridge::for_generated::RustAutoOpaqueInner +); + +// Section: dart2rust + +impl SseDecode for HDLatticeError { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut inner = , + 
>>::sse_decode(deserializer); + return flutter_rust_bridge::for_generated::rust_auto_opaque_decode_owned(inner); + } +} + +impl SseDecode for WormholeProofAggregator { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut inner = , + >>::sse_decode(deserializer); + return flutter_rust_bridge::for_generated::rust_auto_opaque_decode_owned(inner); + } +} + +impl SseDecode + for RustOpaqueMoi> +{ + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut inner = ::sse_decode(deserializer); + return decode_rust_opaque_moi(inner); + } +} + +impl SseDecode + for RustOpaqueMoi< + flutter_rust_bridge::for_generated::RustAutoOpaqueInner, + > +{ + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut inner = ::sse_decode(deserializer); + return decode_rust_opaque_moi(inner); + } +} + +impl SseDecode for String { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut inner = >::sse_decode(deserializer); + return String::from_utf8(inner).unwrap(); + } +} + +impl SseDecode for crate::api::wormhole::AggregatedProof { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_proofHex = ::sse_decode(deserializer); + let mut var_numRealProofs = ::sse_decode(deserializer); + return crate::api::wormhole::AggregatedProof { + proof_hex: var_proofHex, + num_real_proofs: var_numRealProofs, + }; + } +} + +impl SseDecode for crate::api::wormhole::BlockHeaderData { + // Codec=Sse (Serialization based), see doc 
to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_parentHashHex = ::sse_decode(deserializer); + let mut var_stateRootHex = ::sse_decode(deserializer); + let mut var_extrinsicsRootHex = ::sse_decode(deserializer); + let mut var_blockNumber = ::sse_decode(deserializer); + let mut var_digestHex = ::sse_decode(deserializer); + return crate::api::wormhole::BlockHeaderData { + parent_hash_hex: var_parentHashHex, + state_root_hex: var_stateRootHex, + extrinsics_root_hex: var_extrinsicsRootHex, + block_number: var_blockNumber, + digest_hex: var_digestHex, + }; + } +} + +impl SseDecode for bool { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + deserializer.cursor.read_u8().unwrap() != 0 + } +} + +impl SseDecode for crate::api::wormhole::CircuitConfig { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_numLeafProofs = ::sse_decode(deserializer); + return crate::api::wormhole::CircuitConfig { + num_leaf_proofs: var_numLeafProofs, + }; + } +} + +impl SseDecode for crate::api::wormhole::CircuitGenerationResult { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_success = ::sse_decode(deserializer); + let mut var_error = >::sse_decode(deserializer); + let mut var_outputDir = >::sse_decode(deserializer); + return crate::api::wormhole::CircuitGenerationResult { + success: var_success, + error: var_error, + output_dir: var_outputDir, + }; + } +} + +impl SseDecode for crate::api::wormhole::GeneratedProof { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut 
flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_proofHex = ::sse_decode(deserializer); + let mut var_nullifierHex = ::sse_decode(deserializer); + return crate::api::wormhole::GeneratedProof { + proof_hex: var_proofHex, + nullifier_hex: var_nullifierHex, + }; + } +} + +impl SseDecode for crate::api::crypto::Keypair { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_publicKey = >::sse_decode(deserializer); + let mut var_secretKey = >::sse_decode(deserializer); + return crate::api::crypto::Keypair { + public_key: var_publicKey, + secret_key: var_secretKey, + }; + } +} + +impl SseDecode for Vec { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut len_ = ::sse_decode(deserializer); + let mut ans_ = vec![]; + for idx_ in 0..len_ { + ans_.push(::sse_decode(deserializer)); + } + return ans_; + } +} + +impl SseDecode for Vec { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut len_ = ::sse_decode(deserializer); + let mut ans_ = vec![]; + for idx_ in 0..len_ { + ans_.push(::sse_decode(deserializer)); + } + return ans_; + } +} + +impl SseDecode for Option { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + if (::sse_decode(deserializer)) { + return Some(::sse_decode(deserializer)); + } else { + return None; + } + } +} + +impl SseDecode for Option<[u8; 32]> { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + if (::sse_decode(deserializer)) { + return 
Some(<[u8; 32]>::sse_decode(deserializer)); + } else { + return None; + } + } +} + +impl SseDecode for crate::api::wormhole::ProofOutputAssignment { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_outputAmount1 = ::sse_decode(deserializer); + let mut var_exitAccount1 = ::sse_decode(deserializer); + let mut var_outputAmount2 = ::sse_decode(deserializer); + let mut var_exitAccount2 = ::sse_decode(deserializer); + return crate::api::wormhole::ProofOutputAssignment { + output_amount_1: var_outputAmount1, + exit_account_1: var_exitAccount1, + output_amount_2: var_outputAmount2, + exit_account_2: var_exitAccount2, + }; + } +} + +impl SseDecode for crate::api::wormhole::StorageProofData { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_proofNodesHex = >::sse_decode(deserializer); + let mut var_stateRootHex = ::sse_decode(deserializer); + return crate::api::wormhole::StorageProofData { + proof_nodes_hex: var_proofNodesHex, + state_root_hex: var_stateRootHex, + }; + } +} + +impl SseDecode for u16 { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + deserializer.cursor.read_u16::().unwrap() + } +} + +impl SseDecode for u32 { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + deserializer.cursor.read_u32::().unwrap() + } +} + +impl SseDecode for u64 { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + deserializer.cursor.read_u64::().unwrap() + } +} + +impl SseDecode for u8 { + // 
Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + deserializer.cursor.read_u8().unwrap() + } +} + +impl SseDecode for [u8; 32] { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut inner = >::sse_decode(deserializer); + return flutter_rust_bridge::for_generated::from_vec_to_array(inner); + } +} + +impl SseDecode for () { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self {} +} + +impl SseDecode for usize { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + deserializer.cursor.read_u64::().unwrap() as _ + } +} + +impl SseDecode for crate::api::wormhole::WormholeError { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_message = ::sse_decode(deserializer); + return crate::api::wormhole::WormholeError { + message: var_message, + }; + } +} + +impl SseDecode for crate::api::wormhole::WormholePairResult { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_address = ::sse_decode(deserializer); + let mut var_addressHex = ::sse_decode(deserializer); + let mut var_firstHashSs58 = ::sse_decode(deserializer); + let mut var_firstHashHex = ::sse_decode(deserializer); + let mut var_secretHex = ::sse_decode(deserializer); + return crate::api::wormhole::WormholePairResult { + address: var_address, + address_hex: var_addressHex, + first_hash_ss58: var_firstHashSs58, + 
first_hash_hex: var_firstHashHex, + secret_hex: var_secretHex, + }; + } +} + +impl SseDecode for crate::api::wormhole::WormholeProofGenerator { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_binsDir = ::sse_decode(deserializer); + return crate::api::wormhole::WormholeProofGenerator { + bins_dir: var_binsDir, + }; + } +} + +impl SseDecode for crate::api::wormhole::WormholeUtxo { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + let mut var_secretHex = ::sse_decode(deserializer); + let mut var_amount = ::sse_decode(deserializer); + let mut var_transferCount = ::sse_decode(deserializer); + let mut var_fundingAccountHex = ::sse_decode(deserializer); + let mut var_blockHashHex = ::sse_decode(deserializer); + return crate::api::wormhole::WormholeUtxo { + secret_hex: var_secretHex, + amount: var_amount, + transfer_count: var_transferCount, + funding_account_hex: var_fundingAccountHex, + block_hash_hex: var_blockHashHex, + }; + } +} + +impl SseDecode for i32 { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { + deserializer.cursor.read_i32::().unwrap() + } +} + +fn pde_ffi_dispatcher_primary_impl( + func_id: i32, + port: flutter_rust_bridge::for_generated::MessagePort, + ptr: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len: i32, + data_len: i32, +) { + // Codec=Pde (Serialization + dispatch), see doc to use other codecs + match func_id { + 1 => wire__crate__api__wormhole__WormholeProofAggregator_add_proof_impl( + port, + ptr, + rust_vec_len, + data_len, + ), + 2 => wire__crate__api__wormhole__WormholeProofAggregator_aggregate_impl( + port, + ptr, + rust_vec_len, + data_len, + ), + 3 
=> wire__crate__api__wormhole__WormholeProofAggregator_batch_size_impl( + port, + ptr, + rust_vec_len, + data_len, + ), + 4 => wire__crate__api__wormhole__WormholeProofAggregator_clear_impl( + port, + ptr, + rust_vec_len, + data_len, + ), + 5 => wire__crate__api__wormhole__WormholeProofAggregator_new_impl( + port, + ptr, + rust_vec_len, + data_len, + ), + 6 => wire__crate__api__wormhole__WormholeProofAggregator_proof_count_impl( + port, + ptr, + rust_vec_len, + data_len, + ), + 8 => { + wire__crate__api__wormhole__circuit_config_load_impl(port, ptr, rust_vec_len, data_len) + } + 13 => wire__crate__api__wormhole__create_proof_aggregator_impl( + port, + ptr, + rust_vec_len, + data_len, + ), + 14 => wire__crate__api__wormhole__create_proof_generator_impl( + port, + ptr, + rust_vec_len, + data_len, + ), + 26 => wire__crate__api__wormhole__generate_circuit_binaries_impl( + port, + ptr, + rust_vec_len, + data_len, + ), + 32 => wire__crate__api__crypto__init_app_impl(port, ptr, rust_vec_len, data_len), + 45 => wire__crate__api__wormhole__wormhole_proof_generator_generate_proof_impl( + port, + ptr, + rust_vec_len, + data_len, + ), + 46 => wire__crate__api__wormhole__wormhole_proof_generator_new_impl( + port, + ptr, + rust_vec_len, + data_len, + ), + _ => unreachable!(), + } +} + +fn pde_ffi_dispatcher_sync_impl( + func_id: i32, + ptr: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, + rust_vec_len: i32, + data_len: i32, +) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { + // Codec=Pde (Serialization + dispatch), see doc to use other codecs + match func_id { + 7 => wire__crate__api__wormhole__check_circuit_binaries_exist_impl( + ptr, + rust_vec_len, + data_len, + ), + 9 => wire__crate__api__wormhole__compute_block_hash_impl(ptr, rust_vec_len, data_len), + 10 => wire__crate__api__wormhole__compute_nullifier_impl(ptr, rust_vec_len, data_len), + 11 => wire__crate__api__wormhole__compute_output_amount_impl(ptr, rust_vec_len, data_len), + 12 => 
wire__crate__api__wormhole__compute_transfer_proof_storage_key_impl( + ptr, + rust_vec_len, + data_len, + ), + 15 => wire__crate__api__crypto__crystal_alice_impl(ptr, rust_vec_len, data_len), + 16 => wire__crate__api__crypto__crystal_bob_impl(ptr, rust_vec_len, data_len), + 17 => wire__crate__api__crypto__crystal_charlie_impl(ptr, rust_vec_len, data_len), + 18 => wire__crate__api__ur__decode_ur_impl(ptr, rust_vec_len, data_len), + 19 => wire__crate__api__wormhole__dequantize_amount_impl(ptr, rust_vec_len, data_len), + 20 => { + wire__crate__api__wormhole__derive_address_from_secret_impl(ptr, rust_vec_len, data_len) + } + 21 => wire__crate__api__crypto__derive_hd_path_impl(ptr, rust_vec_len, data_len), + 22 => wire__crate__api__wormhole__derive_wormhole_pair_impl(ptr, rust_vec_len, data_len), + 23 => wire__crate__api__wormhole__encode_digest_from_rpc_logs_impl( + ptr, + rust_vec_len, + data_len, + ), + 24 => wire__crate__api__ur__encode_ur_impl(ptr, rust_vec_len, data_len), + 25 => wire__crate__api__wormhole__first_hash_to_address_impl(ptr, rust_vec_len, data_len), + 27 => wire__crate__api__crypto__generate_derived_keypair_impl(ptr, rust_vec_len, data_len), + 28 => wire__crate__api__crypto__generate_keypair_impl(ptr, rust_vec_len, data_len), + 29 => { + wire__crate__api__crypto__generate_keypair_from_seed_impl(ptr, rust_vec_len, data_len) + } + 30 => { + wire__crate__api__wormhole__get_aggregation_batch_size_impl(ptr, rust_vec_len, data_len) + } + 31 => wire__crate__api__wormhole__get_wormhole_derivation_path_impl( + ptr, + rust_vec_len, + data_len, + ), + 33 => wire__crate__api__ur__is_complete_ur_impl(ptr, rust_vec_len, data_len), + 34 => wire__crate__api__crypto__public_key_bytes_impl(ptr, rust_vec_len, data_len), + 35 => wire__crate__api__wormhole__quantize_amount_impl(ptr, rust_vec_len, data_len), + 36 => wire__crate__api__crypto__secret_key_bytes_impl(ptr, rust_vec_len, data_len), + 37 => wire__crate__api__crypto__set_default_ss58_prefix_impl(ptr, 
rust_vec_len, data_len), + 38 => wire__crate__api__crypto__sign_message_impl(ptr, rust_vec_len, data_len), + 39 => wire__crate__api__crypto__sign_message_with_pubkey_impl(ptr, rust_vec_len, data_len), + 40 => wire__crate__api__crypto__signature_bytes_impl(ptr, rust_vec_len, data_len), + 41 => wire__crate__api__crypto__ss58_to_account_id_impl(ptr, rust_vec_len, data_len), + 42 => wire__crate__api__crypto__to_account_id_impl(ptr, rust_vec_len, data_len), + 43 => wire__crate__api__crypto__verify_message_impl(ptr, rust_vec_len, data_len), + 44 => wire__crate__api__wormhole__wormhole_error_to_display_string_impl( + ptr, + rust_vec_len, + data_len, + ), + _ => unreachable!(), + } +} + +// Section: rust2dart + +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for FrbWrapper { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + flutter_rust_bridge::for_generated::rust_auto_opaque_encode::<_, MoiArc<_>>(self.0) + .into_dart() + } +} +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive for FrbWrapper {} + +impl flutter_rust_bridge::IntoIntoDart> for HDLatticeError { + fn into_into_dart(self) -> FrbWrapper { + self.into() + } +} + +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for FrbWrapper { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + flutter_rust_bridge::for_generated::rust_auto_opaque_encode::<_, MoiArc<_>>(self.0) + .into_dart() + } +} +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for FrbWrapper +{ +} + +impl flutter_rust_bridge::IntoIntoDart> + for WormholeProofAggregator +{ + fn into_into_dart(self) -> FrbWrapper { + self.into() + } +} + +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for crate::api::wormhole::AggregatedProof { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + [ + 
self.proof_hex.into_into_dart().into_dart(), + self.num_real_proofs.into_into_dart().into_dart(), + ] + .into_dart() + } +} +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for crate::api::wormhole::AggregatedProof +{ +} +impl flutter_rust_bridge::IntoIntoDart + for crate::api::wormhole::AggregatedProof +{ + fn into_into_dart(self) -> crate::api::wormhole::AggregatedProof { + self + } +} +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for crate::api::wormhole::BlockHeaderData { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + [ + self.parent_hash_hex.into_into_dart().into_dart(), + self.state_root_hex.into_into_dart().into_dart(), + self.extrinsics_root_hex.into_into_dart().into_dart(), + self.block_number.into_into_dart().into_dart(), + self.digest_hex.into_into_dart().into_dart(), + ] + .into_dart() + } +} +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for crate::api::wormhole::BlockHeaderData +{ +} +impl flutter_rust_bridge::IntoIntoDart + for crate::api::wormhole::BlockHeaderData +{ + fn into_into_dart(self) -> crate::api::wormhole::BlockHeaderData { + self + } +} +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for crate::api::wormhole::CircuitConfig { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + [self.num_leaf_proofs.into_into_dart().into_dart()].into_dart() + } +} +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for crate::api::wormhole::CircuitConfig +{ +} +impl flutter_rust_bridge::IntoIntoDart + for crate::api::wormhole::CircuitConfig +{ + fn into_into_dart(self) -> crate::api::wormhole::CircuitConfig { + self + } +} +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for crate::api::wormhole::CircuitGenerationResult { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + [ + 
self.success.into_into_dart().into_dart(), + self.error.into_into_dart().into_dart(), + self.output_dir.into_into_dart().into_dart(), + ] + .into_dart() + } +} +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for crate::api::wormhole::CircuitGenerationResult +{ +} +impl flutter_rust_bridge::IntoIntoDart + for crate::api::wormhole::CircuitGenerationResult +{ + fn into_into_dart(self) -> crate::api::wormhole::CircuitGenerationResult { + self + } +} +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for crate::api::wormhole::GeneratedProof { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + [ + self.proof_hex.into_into_dart().into_dart(), + self.nullifier_hex.into_into_dart().into_dart(), + ] + .into_dart() + } +} +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for crate::api::wormhole::GeneratedProof +{ +} +impl flutter_rust_bridge::IntoIntoDart + for crate::api::wormhole::GeneratedProof +{ + fn into_into_dart(self) -> crate::api::wormhole::GeneratedProof { + self + } +} +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for crate::api::crypto::Keypair { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + [ + self.public_key.into_into_dart().into_dart(), + self.secret_key.into_into_dart().into_dart(), + ] + .into_dart() } } - -impl SseDecode for Vec { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - let mut len_ = ::sse_decode(deserializer); - let mut ans_ = vec![]; - for idx_ in 0..len_ { - ans_.push(::sse_decode(deserializer)); - } - return ans_; +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive for crate::api::crypto::Keypair {} +impl flutter_rust_bridge::IntoIntoDart + for crate::api::crypto::Keypair +{ + fn into_into_dart(self) -> crate::api::crypto::Keypair 
{ + self } } - -impl SseDecode for Vec { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - let mut len_ = ::sse_decode(deserializer); - let mut ans_ = vec![]; - for idx_ in 0..len_ { - ans_.push(::sse_decode(deserializer)); - } - return ans_; +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for crate::api::wormhole::ProofOutputAssignment { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + [ + self.output_amount_1.into_into_dart().into_dart(), + self.exit_account_1.into_into_dart().into_dart(), + self.output_amount_2.into_into_dart().into_dart(), + self.exit_account_2.into_into_dart().into_dart(), + ] + .into_dart() } } - -impl SseDecode for Option<[u8; 32]> { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - if (::sse_decode(deserializer)) { - return Some(<[u8; 32]>::sse_decode(deserializer)); - } else { - return None; - } +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for crate::api::wormhole::ProofOutputAssignment +{ +} +impl flutter_rust_bridge::IntoIntoDart + for crate::api::wormhole::ProofOutputAssignment +{ + fn into_into_dart(self) -> crate::api::wormhole::ProofOutputAssignment { + self } } - -impl SseDecode for u16 { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - deserializer.cursor.read_u16::().unwrap() +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for crate::api::wormhole::StorageProofData { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + [ + self.proof_nodes_hex.into_into_dart().into_dart(), + self.state_root_hex.into_into_dart().into_dart(), 
+ ] + .into_dart() } } - -impl SseDecode for u8 { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - deserializer.cursor.read_u8().unwrap() +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for crate::api::wormhole::StorageProofData +{ +} +impl flutter_rust_bridge::IntoIntoDart + for crate::api::wormhole::StorageProofData +{ + fn into_into_dart(self) -> crate::api::wormhole::StorageProofData { + self } } - -impl SseDecode for [u8; 32] { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - let mut inner = >::sse_decode(deserializer); - return flutter_rust_bridge::for_generated::from_vec_to_array(inner); +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for crate::api::wormhole::WormholeError { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + [self.message.into_into_dart().into_dart()].into_dart() } } - -impl SseDecode for () { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self {} +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for crate::api::wormhole::WormholeError +{ } - -impl SseDecode for usize { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - deserializer.cursor.read_u64::().unwrap() as _ +impl flutter_rust_bridge::IntoIntoDart + for crate::api::wormhole::WormholeError +{ + fn into_into_dart(self) -> crate::api::wormhole::WormholeError { + self } } - -impl SseDecode for i32 { - // Codec=Sse (Serialization based), see doc to use other codecs - fn sse_decode(deserializer: &mut 
flutter_rust_bridge::for_generated::SseDeserializer) -> Self { - deserializer.cursor.read_i32::().unwrap() +// Codec=Dco (DartCObject based), see doc to use other codecs +impl flutter_rust_bridge::IntoDart for crate::api::wormhole::WormholePairResult { + fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { + [ + self.address.into_into_dart().into_dart(), + self.address_hex.into_into_dart().into_dart(), + self.first_hash_ss58.into_into_dart().into_dart(), + self.first_hash_hex.into_into_dart().into_dart(), + self.secret_hex.into_into_dart().into_dart(), + ] + .into_dart() } } - -fn pde_ffi_dispatcher_primary_impl( - func_id: i32, - port: flutter_rust_bridge::for_generated::MessagePort, - ptr: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, - rust_vec_len: i32, - data_len: i32, -) { - // Codec=Pde (Serialization + dispatch), see doc to use other codecs - match func_id { - 10 => wire__crate__api__crypto__init_app_impl(port, ptr, rust_vec_len, data_len), - _ => unreachable!(), - } +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for crate::api::wormhole::WormholePairResult +{ } - -fn pde_ffi_dispatcher_sync_impl( - func_id: i32, - ptr: flutter_rust_bridge::for_generated::PlatformGeneralizedUint8ListPtr, - rust_vec_len: i32, - data_len: i32, -) -> flutter_rust_bridge::for_generated::WireSyncRust2DartSse { - // Codec=Pde (Serialization + dispatch), see doc to use other codecs - match func_id { - 1 => wire__crate__api__crypto__crystal_alice_impl(ptr, rust_vec_len, data_len), - 2 => wire__crate__api__crypto__crystal_bob_impl(ptr, rust_vec_len, data_len), - 3 => wire__crate__api__crypto__crystal_charlie_impl(ptr, rust_vec_len, data_len), - 4 => wire__crate__api__ur__decode_ur_impl(ptr, rust_vec_len, data_len), - 5 => wire__crate__api__crypto__derive_hd_path_impl(ptr, rust_vec_len, data_len), - 6 => wire__crate__api__ur__encode_ur_impl(ptr, rust_vec_len, data_len), - 7 => 
wire__crate__api__crypto__generate_derived_keypair_impl(ptr, rust_vec_len, data_len), - 8 => wire__crate__api__crypto__generate_keypair_impl(ptr, rust_vec_len, data_len), - 9 => wire__crate__api__crypto__generate_keypair_from_seed_impl(ptr, rust_vec_len, data_len), - 11 => wire__crate__api__ur__is_complete_ur_impl(ptr, rust_vec_len, data_len), - 12 => wire__crate__api__crypto__public_key_bytes_impl(ptr, rust_vec_len, data_len), - 13 => wire__crate__api__crypto__secret_key_bytes_impl(ptr, rust_vec_len, data_len), - 14 => wire__crate__api__crypto__set_default_ss58_prefix_impl(ptr, rust_vec_len, data_len), - 15 => wire__crate__api__crypto__sign_message_impl(ptr, rust_vec_len, data_len), - 16 => wire__crate__api__crypto__sign_message_with_pubkey_impl(ptr, rust_vec_len, data_len), - 17 => wire__crate__api__crypto__signature_bytes_impl(ptr, rust_vec_len, data_len), - 18 => wire__crate__api__crypto__ss58_to_account_id_impl(ptr, rust_vec_len, data_len), - 19 => wire__crate__api__crypto__to_account_id_impl(ptr, rust_vec_len, data_len), - 20 => wire__crate__api__crypto__verify_message_impl(ptr, rust_vec_len, data_len), - _ => unreachable!(), +impl flutter_rust_bridge::IntoIntoDart + for crate::api::wormhole::WormholePairResult +{ + fn into_into_dart(self) -> crate::api::wormhole::WormholePairResult { + self } } - -// Section: rust2dart - // Codec=Dco (DartCObject based), see doc to use other codecs -impl flutter_rust_bridge::IntoDart for FrbWrapper { +impl flutter_rust_bridge::IntoDart for crate::api::wormhole::WormholeProofGenerator { fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { - flutter_rust_bridge::for_generated::rust_auto_opaque_encode::<_, MoiArc<_>>(self.0) - .into_dart() + [self.bins_dir.into_into_dart().into_dart()].into_dart() } } -impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive for FrbWrapper {} - -impl flutter_rust_bridge::IntoIntoDart> for HDLatticeError { - fn into_into_dart(self) -> FrbWrapper { - self.into() +impl 
flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for crate::api::wormhole::WormholeProofGenerator +{ +} +impl flutter_rust_bridge::IntoIntoDart + for crate::api::wormhole::WormholeProofGenerator +{ + fn into_into_dart(self) -> crate::api::wormhole::WormholeProofGenerator { + self } } - // Codec=Dco (DartCObject based), see doc to use other codecs -impl flutter_rust_bridge::IntoDart for crate::api::crypto::Keypair { +impl flutter_rust_bridge::IntoDart for crate::api::wormhole::WormholeUtxo { fn into_dart(self) -> flutter_rust_bridge::for_generated::DartAbi { [ - self.public_key.into_into_dart().into_dart(), - self.secret_key.into_into_dart().into_dart(), + self.secret_hex.into_into_dart().into_dart(), + self.amount.into_into_dart().into_dart(), + self.transfer_count.into_into_dart().into_dart(), + self.funding_account_hex.into_into_dart().into_dart(), + self.block_hash_hex.into_into_dart().into_dart(), ] .into_dart() } } -impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive for crate::api::crypto::Keypair {} -impl flutter_rust_bridge::IntoIntoDart - for crate::api::crypto::Keypair +impl flutter_rust_bridge::for_generated::IntoDartExceptPrimitive + for crate::api::wormhole::WormholeUtxo { - fn into_into_dart(self) -> crate::api::crypto::Keypair { +} +impl flutter_rust_bridge::IntoIntoDart + for crate::api::wormhole::WormholeUtxo +{ + fn into_into_dart(self) -> crate::api::wormhole::WormholeUtxo { self } } @@ -891,6 +2420,18 @@ impl SseEncode for HDLatticeError { } } +impl SseEncode for WormholeProofAggregator { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + , + >>::sse_encode( + flutter_rust_bridge::for_generated::rust_auto_opaque_encode::<_, MoiArc<_>>(self), + serializer, + ); + } +} + impl SseEncode for RustOpaqueMoi> { @@ -902,6 +2443,19 @@ impl SseEncode } } +impl SseEncode + for RustOpaqueMoi< + 
flutter_rust_bridge::for_generated::RustAutoOpaqueInner, + > +{ + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + let (ptr, size) = self.sse_encode_raw(); + ::sse_encode(ptr, serializer); + ::sse_encode(size, serializer); + } +} + impl SseEncode for String { // Codec=Sse (Serialization based), see doc to use other codecs fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { @@ -909,6 +2463,25 @@ impl SseEncode for String { } } +impl SseEncode for crate::api::wormhole::AggregatedProof { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + ::sse_encode(self.proof_hex, serializer); + ::sse_encode(self.num_real_proofs, serializer); + } +} + +impl SseEncode for crate::api::wormhole::BlockHeaderData { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + ::sse_encode(self.parent_hash_hex, serializer); + ::sse_encode(self.state_root_hex, serializer); + ::sse_encode(self.extrinsics_root_hex, serializer); + ::sse_encode(self.block_number, serializer); + ::sse_encode(self.digest_hex, serializer); + } +} + impl SseEncode for bool { // Codec=Sse (Serialization based), see doc to use other codecs fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { @@ -916,6 +2489,30 @@ impl SseEncode for bool { } } +impl SseEncode for crate::api::wormhole::CircuitConfig { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + ::sse_encode(self.num_leaf_proofs, serializer); + } +} + +impl SseEncode for crate::api::wormhole::CircuitGenerationResult { + // Codec=Sse (Serialization based), see doc to use 
other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + ::sse_encode(self.success, serializer); + >::sse_encode(self.error, serializer); + >::sse_encode(self.output_dir, serializer); + } +} + +impl SseEncode for crate::api::wormhole::GeneratedProof { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + ::sse_encode(self.proof_hex, serializer); + ::sse_encode(self.nullifier_hex, serializer); + } +} + impl SseEncode for crate::api::crypto::Keypair { // Codec=Sse (Serialization based), see doc to use other codecs fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { @@ -944,6 +2541,16 @@ impl SseEncode for Vec { } } +impl SseEncode for Option { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + ::sse_encode(self.is_some(), serializer); + if let Some(value) = self { + ::sse_encode(value, serializer); + } + } +} + impl SseEncode for Option<[u8; 32]> { // Codec=Sse (Serialization based), see doc to use other codecs fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { @@ -954,6 +2561,24 @@ impl SseEncode for Option<[u8; 32]> { } } +impl SseEncode for crate::api::wormhole::ProofOutputAssignment { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + ::sse_encode(self.output_amount_1, serializer); + ::sse_encode(self.exit_account_1, serializer); + ::sse_encode(self.output_amount_2, serializer); + ::sse_encode(self.exit_account_2, serializer); + } +} + +impl SseEncode for crate::api::wormhole::StorageProofData { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut 
flutter_rust_bridge::for_generated::SseSerializer) { + >::sse_encode(self.proof_nodes_hex, serializer); + ::sse_encode(self.state_root_hex, serializer); + } +} + impl SseEncode for u16 { // Codec=Sse (Serialization based), see doc to use other codecs fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { @@ -961,6 +2586,20 @@ impl SseEncode for u16 { } } +impl SseEncode for u32 { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + serializer.cursor.write_u32::(self).unwrap(); + } +} + +impl SseEncode for u64 { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + serializer.cursor.write_u64::(self).unwrap(); + } +} + impl SseEncode for u8 { // Codec=Sse (Serialization based), see doc to use other codecs fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { @@ -996,6 +2635,42 @@ impl SseEncode for usize { } } +impl SseEncode for crate::api::wormhole::WormholeError { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + ::sse_encode(self.message, serializer); + } +} + +impl SseEncode for crate::api::wormhole::WormholePairResult { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + ::sse_encode(self.address, serializer); + ::sse_encode(self.address_hex, serializer); + ::sse_encode(self.first_hash_ss58, serializer); + ::sse_encode(self.first_hash_hex, serializer); + ::sse_encode(self.secret_hex, serializer); + } +} + +impl SseEncode for crate::api::wormhole::WormholeProofGenerator { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, 
serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + ::sse_encode(self.bins_dir, serializer); + } +} + +impl SseEncode for crate::api::wormhole::WormholeUtxo { + // Codec=Sse (Serialization based), see doc to use other codecs + fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { + ::sse_encode(self.secret_hex, serializer); + ::sse_encode(self.amount, serializer); + ::sse_encode(self.transfer_count, serializer); + ::sse_encode(self.funding_account_hex, serializer); + ::sse_encode(self.block_hash_hex, serializer); + } +} + impl SseEncode for i32 { // Codec=Sse (Serialization based), see doc to use other codecs fn sse_encode(self, serializer: &mut flutter_rust_bridge::for_generated::SseSerializer) { @@ -1012,6 +2687,7 @@ mod io { use super::*; use crate::api::crypto::*; + use crate::api::wormhole::*; use flutter_rust_bridge::for_generated::byteorder::{ NativeEndian, ReadBytesExt, WriteBytesExt, }; @@ -1035,6 +2711,20 @@ mod io { ) { MoiArc::>::decrement_strong_count(ptr as _); } + + #[unsafe(no_mangle)] + pub extern "C" fn frbgen_quantus_sdk_rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + ptr: *const std::ffi::c_void, + ) { + MoiArc::>::increment_strong_count(ptr as _); + } + + #[unsafe(no_mangle)] + pub extern "C" fn frbgen_quantus_sdk_rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + ptr: *const std::ffi::c_void, + ) { + MoiArc::>::decrement_strong_count(ptr as _); + } } #[cfg(not(target_family = "wasm"))] pub use io::*; @@ -1049,6 +2739,7 @@ mod web { use super::*; use crate::api::crypto::*; + use crate::api::wormhole::*; use flutter_rust_bridge::for_generated::byteorder::{ NativeEndian, ReadBytesExt, WriteBytesExt, }; @@ -1074,6 +2765,20 @@ mod web { ) { MoiArc::>::decrement_strong_count(ptr as _); } + + #[wasm_bindgen] + pub fn 
rust_arc_increment_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + ptr: *const std::ffi::c_void, + ) { + MoiArc::>::increment_strong_count(ptr as _); + } + + #[wasm_bindgen] + pub fn rust_arc_decrement_strong_count_RustOpaque_flutter_rust_bridgefor_generatedRustAutoOpaqueInnerWormholeProofAggregator( + ptr: *const std::ffi::c_void, + ) { + MoiArc::>::decrement_strong_count(ptr as _); + } } #[cfg(target_family = "wasm")] pub use web::*;