diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 00000000..8ec1fbb8 --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,189 @@ +# Copilot Instructions for Toban Project + +## Project Overview +Toban is a role-based rewards distribution system built on blockchain technology. It simplifies contribution tracking and reward distribution for collaborative projects using Hats Protocol, Splits, and custom smart contracts. + +## Architecture & Technology Stack + +### Monorepo Structure (pnpm workspace) +- `pkgs/contract/` - Solidity smart contracts (Hardhat) +- `pkgs/frontend/` - React app (Remix + Vite + Chakra UI) +- `pkgs/subgraph/` - The Graph Protocol indexing +- `pkgs/cli/` - TypeScript CLI tools +- `pkgs/document/` - Docusaurus documentation + +### Key Technologies +- **Smart Contracts**: Solidity ^0.8.24, Hardhat, OpenZeppelin +- **Frontend**: React, Remix, Vite, Chakra UI, Privy (Web3 auth) +- **Backend**: The Graph Protocol, Apollo Client (GraphQL) +- **Deployment**: Sepolia (testnet), Base (mainnet) + +## Development Guidelines + +### Code Style & Conventions +- Use **Biome** for formatting and linting (not ESLint/Prettier) +- **TypeScript strict mode** throughout the project +- **PascalCase** for contracts and components, **camelCase** for functions/variables +- Use **pnpm** for package management (never npm or yarn) + +### Smart Contract Development +- Follow **UUPS upgradeable pattern** for contracts +- Use **OpenZeppelin** contracts for security +- Write comprehensive **NatSpec documentation** +- Always include **test coverage** for new functions +- Use **Hardhat** tasks for contract interactions + +### Frontend Development +- Use **Chakra UI** components consistently +- Implement **responsive design** patterns +- Handle **Web3 connection states** properly with Privy +- Use **Apollo Client** for GraphQL queries +- Follow **Remix** conventions for routing and data loading + +### Key Smart 
Contracts +- `BigBang.sol` - Main project initialization contract +- `HatsTimeFrameModule.sol` - Time-based role management +- `HatsHatCreatorModule.sol` - Dynamic role creation +- `SplitsCreator.sol` - Reward distribution mechanism +- `FractionToken.sol` - Thanks Token (Assist Credit) implementation + +## Command Patterns + +### Package-specific Commands +```bash +# Frontend development +pnpm frontend dev +pnpm frontend build +pnpm frontend test:e2e:dev + +# Contract development +pnpm contract compile +pnpm contract test +pnpm contract deploy:all +pnpm contract coverage + +# Code quality +pnpm biome:format +pnpm biome:check +``` + +### Common Development Tasks +- **New contract**: Create in appropriate `/contracts/` subdirectory with tests +- **Frontend components**: Place in `/app/components/` with proper typing +- **GraphQL queries**: Use codegen with `pnpm frontend codegen` +- **Documentation**: Update relevant `/docs/` files + +## Important Patterns & Best Practices + +### Smart Contract Patterns +- Always use **initializer** functions instead of constructors for upgradeable contracts +- Implement proper **access control** using OpenZeppelin's AccessControl +- Use **events** for important state changes +- Include **input validation** and **error handling** + +### Frontend Patterns +- Use **React hooks** for state management +- Implement **loading states** for blockchain interactions +- Handle **transaction errors** gracefully +- Use **TypeScript interfaces** for all data structures + +### Security Considerations +- **Validate all inputs** in smart contracts +- Use **reentrancy guards** where appropriate +- Implement **proper access controls** +- **Test edge cases** thoroughly +- Follow **principle of least privilege** + +## Project-Specific Context + +### Core Domain Concepts +- **Roles**: Managed via Hats Protocol for permissions and responsibilities +- **Thanks Tokens**: P2P transferable tokens for contribution tracking +- **Splits**: Automated reward 
distribution based on contribution +- **Time Frames**: Period-based role assignments and reward calculations +- **Workspaces**: Project containers with their own governance + +### Key User Flows +1. **Workspace Creation**: BigBang contract initialization +2. **Role Management**: Hat creation and assignment +3. **Contribution Tracking**: Thanks Token transfers +4. **Reward Distribution**: Split creation and execution + +### Testing Strategy +- **Unit tests** for all smart contracts +- **Integration tests** for contract interactions +- **E2E tests** using Cypress for critical user flows +- **Coverage reports** for contract code + +## Environment & Deployment +- **Development**: Local Hardhat network +- **Testnet**: Sepolia with deployed contract addresses +- **Production**: Base network +- **Frontend**: https://toban.xyz + +## Serena MCP Integration + +### Project Management with Serena +This project uses **Serena MCP** for enhanced development workflow and project management: + +#### Essential Serena Workflow +1. **Project Activation**: Always start with `mcp_serena_activate_project` for the toban project +2. **Symbol Discovery**: Use `mcp_serena_find_symbol` for code exploration and understanding +3. **Code Editing**: Prefer `mcp_serena_replace_symbol_body` for symbol-level edits +4. 
**Memory System**: Leverage project memories for context retention across sessions + +#### Serena Best Practices +- **Symbol-first approach**: Use symbol-based tools (`find_symbol`, `replace_symbol_body`) over regex when possible +- **Memory utilization**: Check `mcp_serena_list_memories` and `mcp_serena_read_memory` for project context +- **Pattern searching**: Use `mcp_serena_search_for_pattern` for cross-codebase searches +- **Code references**: Use `mcp_serena_find_referencing_symbols` to understand symbol usage + +#### Available Memories +The project maintains memories for: +- Project overview and architecture +- Development commands and workflows +- Code style conventions +- macOS environment specifics +- Task completion checklists + +## When Helping with Code + +1. **Start with Serena activation** - Always activate the toban project first +2. **Use symbol-based operations** - Prefer Serena's symbol tools for precise code manipulation +3. **Leverage project memories** - Check existing memories for context before starting tasks +4. **Consider monorepo structure** - Commands should be run with pnpm workspace syntax +5. **Prioritize type safety** - Use TypeScript strictly throughout +6. **Follow existing patterns** - Maintain consistency with current codebase +7. **Consider gas optimization** for smart contracts +8. **Implement proper error handling** for Web3 interactions +9. **Write tests** for new functionality +10. **Update documentation** when adding new features +11. **Think before acting** - Use `mcp_serena_think_about_task_adherence` for complex tasks + +## Development Workflow with Serena + +### For New Features +1. Activate project with Serena +2. Use `mcp_serena_find_symbol` to understand existing code structure +3. Use `mcp_serena_search_for_pattern` to find similar implementations +4. Implement using symbol-based editing tools +5. Use `mcp_serena_think_about_whether_you_are_done` to validate completion + +### For Bug Fixes +1. 
Activate project and check relevant memories +2. Use `mcp_serena_find_referencing_symbols` to understand impact +3. Use symbol-based tools for precise fixes +4. Validate changes don't break existing functionality + +### For Code Exploration +1. Start with `mcp_serena_get_symbols_overview` for file understanding +2. Use `mcp_serena_find_symbol` with depth parameter for detailed exploration +3. Leverage `mcp_serena_search_for_pattern` for cross-file analysis + +## Common Issues & Solutions +- **MetaMask connection issues**: Check Privy configuration +- **Contract deployment failures**: Verify network configuration and gas settings +- **GraphQL schema mismatches**: Run codegen after subgraph updates +- **Build failures**: Ensure all dependencies are installed with pnpm install +- **Serena symbol not found**: Use pattern search or check file structure with `mcp_serena_list_dir` +- **Memory context missing**: Create new memories with `mcp_serena_write_memory` for future reference diff --git a/.vscode/config/.env.example b/.vscode/config/.env.example new file mode 100644 index 00000000..6e948761 --- /dev/null +++ b/.vscode/config/.env.example @@ -0,0 +1 @@ +GITHUB_PERSONAL_ACCESS_TOKEN= diff --git a/.vscode/mcp.json b/.vscode/mcp.json new file mode 100644 index 00000000..32d39ec7 --- /dev/null +++ b/.vscode/mcp.json @@ -0,0 +1,36 @@ +{ + "inputs": [], + "servers": { + "context7": { + "type": "http", + "url": "https://mcp.context7.com/mcp" + }, + "sequential-thinking": { + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"] + }, + "serena": { + "command": "uvx", + "args": [ + "--from", + "git+https://github.com/oraios/serena", + "serena", + "start-mcp-server", + "--context", + "ide-assistant" + ] + }, + "github": { + "command": "docker", + "args": [ + "run", + "-i", + "--rm", + "-e", + "GITHUB_PERSONAL_ACCESS_TOKEN", + "ghcr.io/github/github-mcp-server" + ], + "envFile": "${workspaceFolder}/.vscode/config/.env" + } + } +} diff --git 
a/README.md b/README.md index 546a4725..52ad8a57 100644 --- a/README.md +++ b/README.md @@ -228,18 +228,6 @@ These solutions were combined with ideas from [Hats Protocol](https://www.hatspr pnpm contract getChainInfo --network sepolia ``` - - #### **deploy Sample Contract** - - ```bash - pnpm contract deploy:Lock --network sepolia - ``` - - - #### **verify deployed contract** - - ```bash - pnpm contract deploy:Lock --verify --network sepolia - ``` - - #### **deploy all contract** ```bash @@ -272,10 +260,19 @@ These solutions were combined with ideas from [Hats Protocol](https://www.hatspr - #### **call bigbang task** - Please set params when you execute. + ワークスペースを作成する際に実行されるメソッド. + 必要なコントラクトが一式デプロイされる ```bash - pnpm contract bigbang --owner 0x51908F598A5e0d8F1A3bAbFa6DF76F9704daD072 --tophatdetails "tophatDetails" --tophatimageuri "tophatURI" --hatterhatdetails "hatterhatURI" --hatterhatimageuri "tophatDetails" --forwarder 0x51908F598A5e0d8F1A3bAbFa6DF76F9704daD072 --network sepolia + pnpm contract bigbang \ + --owner 0x51908F598A5e0d8F1A3bAbFa6DF76F9704daD072 \ + --tophatdetails "Dev Top Hat" \ + --tophatimageuri "dev-tophat" \ + --hatterhatdetails "Dev Hatter Hat" \ + --hatterhatimageuri "dev-hatterhat" \ + --memberhatdetails "Dev Member Hat" \ + --memberhatimageuri "dev-memberhat" \ + --network sepolia ``` - #### **call getWoreTime task** @@ -287,5 +284,13 @@ These solutions were combined with ideas from [Hats Protocol](https://www.hatspr - #### **call mintHat task** ```bash - pnpm contract mintHat --hatid 17011726346972053710434886519909386955065038130623101235576378067255296 --wearer 0x1295BDc0C102EB105dC0198fdC193588fe66A1e4 --network sepolia + # --module には bigBangコントラクトの時に出力される hatsTimeFrameModule のアドレスを当てはめること! 
+ pnpm contract mintHat --hatid 39145842972085145413893403125858635166881967613628980006401871953526784 --wearer 0xEef377Bdf67A227a744e386231fB3f264C158CDF --module 0xA193a4CE929168A594744A53Fb17Ba4caBb507a4 --network sepolia + ``` + + - #### **call batchMintHat task** + + ```bash + # --module には bigBangコントラクトの時に出力される hatsTimeFrameModule のアドレスを当てはめること! + pnpm contract batchMintHat --hatid 39145842972085145413893403125858635166881967613628980006401871953526784 --csv ./data/example-wearers.csv --module 0xA193a4CE929168A594744A53Fb17Ba4caBb507a4 --network sepolia + ``` diff --git a/pkgs/contract/contracts/hatsmodules/timeframe/HatsTimeFrameModule.sol b/pkgs/contract/contracts/hatsmodules/timeframe/HatsTimeFrameModule.sol index 3cf2fb17..5889dc76 100644 --- a/pkgs/contract/contracts/hatsmodules/timeframe/HatsTimeFrameModule.sol +++ b/pkgs/contract/contracts/hatsmodules/timeframe/HatsTimeFrameModule.sol @@ -70,6 +70,46 @@ contract HatsTimeFrameModule is HatsModule, IHatsTimeFrameModule { emit HatMinted(hatId, wearer, time == 0 ? block.timestamp : time); } + /** + * @dev Batch mint hats for multiple addresses with the same hat ID. + * @param hatId The ID of the hat to be minted for all wearers. + * @param wearers Array of addresses to receive the hat. + * @param times Array of specific timestamps when each hat was minted (0 for current time). 
+ */ + function batchMintHat( + uint256 hatId, + address[] calldata wearers, + uint256[] calldata times + ) external { + // 権限チェック + require(hasAuthority(msg.sender), "Not authorized"); + + // 入力検証 + require(wearers.length > 0, "Empty wearers array"); + require(wearers.length == times.length, "Array length mismatch"); + require(wearers.length <= 100, "Batch size too large"); + + // 事前検証(全てのwearerが有効かチェック) + for (uint256 i = 0; i < wearers.length; i++) { + require(wearers[i] != address(0), "Invalid wearer address"); + require(woreTime[hatId][wearers[i]] == 0, "Hat already minted"); + } + + // 一括ミント実行 + for (uint256 i = 0; i < wearers.length; i++) { + address wearer = wearers[i]; + uint256 time = times[i]; + + _setWoreTime(wearer, hatId, time); + isActive[hatId][wearer] = true; + HATS().mintHat(hatId, wearer); + + emit HatMinted(hatId, wearer, time == 0 ? block.timestamp : time); + } + + emit BatchMintCompleted(hatId, wearers.length); + } + /** * @dev Deactivate the hat, pausing the contribution time. * Calculate the contribution time up to deactivation. diff --git a/pkgs/contract/contracts/hatsmodules/timeframe/IHatsTimeFrameModule.sol b/pkgs/contract/contracts/hatsmodules/timeframe/IHatsTimeFrameModule.sol index 9b03ed1b..385baac0 100644 --- a/pkgs/contract/contracts/hatsmodules/timeframe/IHatsTimeFrameModule.sol +++ b/pkgs/contract/contracts/hatsmodules/timeframe/IHatsTimeFrameModule.sol @@ -31,6 +31,18 @@ interface IHatsTimeFrameModule { uint256 hatId ) external view returns (uint256); + /** + * @dev Batch mint hats for multiple addresses with the same hat ID. + * @param hatId The ID of the hat to be minted for all wearers. + * @param wearers Array of addresses to receive the hat. + * @param times Array of specific timestamps when each hat was minted (0 for current time). 
+ */ + function batchMintHat( + uint256 hatId, + address[] calldata wearers, + uint256[] calldata times + ) external; + function woreTime( uint256 hatId, address wearer @@ -84,4 +96,9 @@ interface IHatsTimeFrameModule { * @notice Emitted when a hat is renounced */ event HatRenounced(uint256 indexed hatId, address indexed wearer); + + /** + * @notice Emitted when batch mint is completed + */ + event BatchMintCompleted(uint256 indexed hatId, uint256 count); } diff --git a/pkgs/contract/data/example-wearers.csv b/pkgs/contract/data/example-wearers.csv new file mode 100644 index 00000000..e2d9ebee --- /dev/null +++ b/pkgs/contract/data/example-wearers.csv @@ -0,0 +1,5 @@ +address,timestamp +0x1431ea8af860C3862A919968C71f901aEdE1910E,1640995200 +0xC66a0700d1578C83752E6b7518819abc0ecc4f18, 1640995200 +0x402fb9124a11396D5F7e596855CB59aBEB3459AD,1672531200 +0xef902bbE4967ac7A5Ec22039cA2d994325A36dB9, 1640995200 diff --git a/pkgs/contract/helpers/deploy/contractsJsonHelper.ts b/pkgs/contract/helpers/deploy/contractsJsonHelper.ts index 2145a2a9..981e30c6 100644 --- a/pkgs/contract/helpers/deploy/contractsJsonHelper.ts +++ b/pkgs/contract/helpers/deploy/contractsJsonHelper.ts @@ -48,7 +48,22 @@ const resetContractAddressesJson = ({ network }: { network: string }): void => { const loadDeployedContractAddresses = (network: string) => { const filePath = getFilePath({ network: network }); - return jsonfile.readFileSync(filePath); + const data = jsonfile.readFileSync(filePath); + + // Clean up any double-escaped JSON strings + const cleanData = JSON.parse(JSON.stringify(data), (key, value) => { + if ( + typeof value === "string" && + value.startsWith('"') && + value.endsWith('"') + ) { + // Remove extra quotes from double-escaped strings + return value.slice(1, -1); + } + return value; + }); + + return cleanData; }; const _updateJson = ({ @@ -67,7 +82,9 @@ const _updateJson = ({ obj[group] = value as Record; } else { if (obj[group][name] === undefined) obj[group][name] = ""; - 
obj[group][name] = JSON.stringify(value); + // Don't double-stringify string values + obj[group][name] = + typeof value === "string" ? value : JSON.stringify(value); } }; diff --git a/pkgs/contract/ignition/deployments/chain-11155111/deployed_addresses.json b/pkgs/contract/ignition/deployments/chain-11155111/deployed_addresses.json deleted file mode 100644 index 45a8f014..00000000 --- a/pkgs/contract/ignition/deployments/chain-11155111/deployed_addresses.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "LockModule#Lock": "0x427e5da5aBd9DdE6272EDb6d7388Fb1C7AB7E519" -} diff --git a/pkgs/contract/ignition/deployments/chain-11155111/journal.jsonl b/pkgs/contract/ignition/deployments/chain-11155111/journal.jsonl deleted file mode 100644 index 56acb793..00000000 --- a/pkgs/contract/ignition/deployments/chain-11155111/journal.jsonl +++ /dev/null @@ -1,7 +0,0 @@ - -{"chainId":11155111,"type":"DEPLOYMENT_INITIALIZE"} -{"artifactId":"LockModule#Lock","constructorArgs":[1893456000],"contractName":"Lock","dependencies":[],"from":"0x51908f598a5e0d8f1a3babfa6df76f9704dad072","futureId":"LockModule#Lock","futureType":"NAMED_ARTIFACT_CONTRACT_DEPLOYMENT","libraries":{},"strategy":"basic","strategyConfig":{},"type":"DEPLOYMENT_EXECUTION_STATE_INITIALIZE","value":{"_kind":"bigint","value":"1000000000000000"}} 
-{"futureId":"LockModule#Lock","networkInteraction":{"data":"0x60806040526200001862000012620000ef565b620002dd565b6200002262000031565b6104d96200032682396104d990f35b60405190565b601f801991011690565b634e487b7160e01b600052604160045260246000fd5b90620000639062000037565b810190811060018060401b038211176200007c57604052565b62000041565b90620000996200009162000031565b928362000057565b565b600080fd5b90565b620000ae81620000a0565b03620000b657565b600080fd5b90505190620000ca82620000a3565b565b90602082820312620000e957620000e691600001620000bb565b90565b6200009b565b62000112620007ff80380380620001068162000082565b928339810190620000cc565b90565b60209181520190565b60207f7572650000000000000000000000000000000000000000000000000000000000917f556e6c6f636b2074696d652073686f756c6420626520696e207468652066757460008201520152565b6200017b602360409262000115565b62000186816200011e565b0190565b620001a290602081019060008183039101526200016c565b90565b15620001ad57565b620001b762000031565b62461bcd60e51b815280620001cf600482016200018a565b0390fd5b60001b90565b90620001e860001991620001d3565b9181191691161790565b90565b6200020e620002086200021492620000a0565b620001f2565b620000a0565b90565b90565b90620002346200022e6200023c92620001f5565b62000217565b8254620001d9565b9055565b60018060a01b031690565b620002646200025e6200026a9262000240565b620001f2565b62000240565b90565b62000278906200024b565b90565b62000286906200026d565b90565b906200029c60018060a01b0391620001d3565b9181191691161790565b620002b1906200026d565b90565b90565b90620002d1620002cb620002d992620002a6565b620002b4565b825462000289565b9055565b6200030c906200030442620002fd620002f684620000a0565b91620000a0565b10620001a5565b60006200021a565b620003236200031b336200027b565b6001620002b7565b56fe60806040526004361015610013575b6101f2565b61001e60003561004d565b8063251c1aa3146100485780633ccfd60b1461004357638da5cb5b0361000e576101bd565b61010f565b6100d4565b60e01c90565b60405190565b600080fd5b600080fd5b600091031261006e57565b61005e565b1c90565b90565b61008a90600861008f9302610073565b610077565b90565b9061009d915461007a565b90565b610
0ab600080610092565b90565b90565b6100ba906100ae565b9052565b91906100d2906000602085019401906100b1565b565b34610104576100e4366004610063565b6101006100ef6100a0565b6100f7610053565b918291826100be565b0390f35b610059565b60000190565b3461013d5761011f366004610063565b6101276103d4565b61012f610053565b8061013981610109565b0390f35b610059565b60018060a01b031690565b61015d9060086101629302610073565b610142565b90565b90610170915461014d565b90565b6101806001600090610165565b90565b60018060a01b031690565b61019790610183565b90565b6101a39061018e565b9052565b91906101bb9060006020850194019061019a565b565b346101ed576101cd366004610063565b6101e96101d8610173565b6101e0610053565b918291826101a7565b0390f35b610059565b600080fd5b60001c90565b61020961020e916101f7565b610077565b90565b61021b90546101fd565b90565b60209181520190565b60007f596f752063616e27742077697468647261772079657400000000000000000000910152565b61025c601660209261021e565b61026581610227565b0190565b61027f906020810190600081830391015261024f565b90565b1561028957565b610291610053565b62461bcd60e51b8152806102a760048201610269565b0390fd5b6102b76102bc916101f7565b610142565b90565b6102c990546102ab565b90565b6102d590610183565b90565b90565b6102ef6102ea6102f492610183565b6102d8565b610183565b90565b610300906102db565b90565b61030c906102f7565b90565b60007f596f75206172656e277420746865206f776e6572000000000000000000000000910152565b610344601460209261021e565b61034d8161030f565b0190565b6103679060208101906000818303910152610337565b90565b1561037157565b610379610053565b62461bcd60e51b81528061038f60048201610351565b0390fd5b61039c906102f7565b90565b9160206103c19294936103ba604082019660008301906100b1565b01906100b1565b565b6103cb610053565b3d6000823e3d90fd5b6103fa426103f36103ed6103e86000610211565b6100ae565b916100ae565b1015610282565b61041f3361041961041361040e60016102bf565b610303565b916102cc565b1461036a565b61042830610393565b31427fbf2ed60bd5b5965d685680c01195c9514e4382e28e3a5a2d2d5244bf59411b9391610460610457610053565b9283928361039f565b0390a1600080808061047a61047560016102bf565b610303565b61048330610393565b3190828215610
49a575bf11561049557565b6103c3565b506108fc61048d56fea2646970667358221220bcef3682da19e783550f19d53e72751858902e9e4684c00d50dff9b73c9848d764736f6c634300081800330000000000000000000000000000000000000000000000000000000070dbd880","id":1,"type":"ONCHAIN_INTERACTION","value":{"_kind":"bigint","value":"1000000000000000"}},"type":"NETWORK_INTERACTION_REQUEST"} -{"futureId":"LockModule#Lock","networkInteractionId":1,"nonce":210,"transaction":{"fees":{"maxFeePerGas":{"_kind":"bigint","value":"628158438"},"maxPriorityFeePerGas":{"_kind":"bigint","value":"2342368"}},"hash":"0xd78ef893ab193e794fff5c8621797a1090b55b83c58eaf4f3fa31fa61c3fed90"},"type":"TRANSACTION_SEND"} -{"futureId":"LockModule#Lock","hash":"0xd78ef893ab193e794fff5c8621797a1090b55b83c58eaf4f3fa31fa61c3fed90","networkInteractionId":1,"receipt":{"blockHash":"0x5fea4668db8dfda56319f08cfc2ff3271e65d2ec547c3671d0d139deea375019","blockNumber":6693697,"contractAddress":"0x427e5da5aBd9DdE6272EDb6d7388Fb1C7AB7E519","logs":[],"status":"SUCCESS"},"type":"TRANSACTION_CONFIRM"} -{"futureId":"LockModule#Lock","result":{"address":"0x427e5da5aBd9DdE6272EDb6d7388Fb1C7AB7E519","type":"SUCCESS"},"type":"DEPLOYMENT_EXECUTION_STATE_COMPLETE"} \ No newline at end of file diff --git a/pkgs/contract/ignition/modules/Lock.ts b/pkgs/contract/ignition/modules/Lock.ts deleted file mode 100644 index 163f622d..00000000 --- a/pkgs/contract/ignition/modules/Lock.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { buildModule } from "@nomicfoundation/hardhat-ignition/modules"; -import { parseEther } from "viem"; - -const JAN_1ST_2030 = 1893456000; -const ONE_GWEI: bigint = parseEther("0.001"); - -/** - * デプロイスクリプト - */ -const LockModule = buildModule("LockModule", (m) => { - const unlockTime = m.getParameter("unlockTime", JAN_1ST_2030); - const lockedAmount = m.getParameter("lockedAmount", ONE_GWEI); - - const lock = m.contract("Lock", [unlockTime], { - value: lockedAmount, - }); - - return { lock }; -}); - -export default LockModule; diff --git 
a/pkgs/contract/outputs/contracts-holesky.json b/pkgs/contract/outputs/contracts-holesky.json index fac16c8c..ece9b016 100644 --- a/pkgs/contract/outputs/contracts-holesky.json +++ b/pkgs/contract/outputs/contracts-holesky.json @@ -3,16 +3,16 @@ "Hats": "0x3bc1A0Ad72417f2d411118085256fC53CBdDd137", "HatsModuleFactory": "0xfE661c01891172046feE16D3a57c3Cf456729efA", "PullSplitsFactory": "0x80f1B766817D04870f115fEBbcCADF8DBF75E017", - "BigBang": "\"0x3E70d10aCdcC14B6C31DA26DcC195a6EDf1C2c16\"", - "FractionToken": "\"0x54889278bf4F16ACAa3CC1402C987A6C42a5308B\"", - "SplitsCreatorFactory": "\"0x2b44c1F5B0D2a6a39F83effbF48aA09C833EBe12\"", - "SplitsCreator": "\"0x09b853E0945d1c86af10b5665472501bD5F6627c\"", - "HatsTimeFrameModule": "\"0x808996331ADD2715854e31e3dd4f9a736DE23604\"", - "ProxyAdmin": "\"0xdAEC7C851DA8E9b041e4592fdCF19843Bc1f8bE8\"" + "BigBang": "0x3E70d10aCdcC14B6C31DA26DcC195a6EDf1C2c16", + "FractionToken": "0x54889278bf4F16ACAa3CC1402C987A6C42a5308B", + "SplitsCreatorFactory": "0x2b44c1F5B0D2a6a39F83effbF48aA09C833EBe12", + "SplitsCreator": "0x09b853E0945d1c86af10b5665472501bD5F6627c", + "HatsTimeFrameModule": "0x808996331ADD2715854e31e3dd4f9a736DE23604", + "ProxyAdmin": "0xdAEC7C851DA8E9b041e4592fdCF19843Bc1f8bE8" }, "implementations": { - "FractionToken_Implementation": "\"0x7F096Ba747bEAfFd61c4f5BEAeD4756fd767cc7e\"", - "SplitsCreatorFactory_Implementation": "\"0xfD82EeaE18133F9629Ee220Cf35d06951bccEf51\"", - "BigBang_Implementation": "\"0x8F7FaFBabdAD34Fb88785E4a07fef68F2aDb470E\"" + "FractionToken_Implementation": "0x7F096Ba747bEAfFd61c4f5BEAeD4756fd767cc7e", + "SplitsCreatorFactory_Implementation": "0xfD82EeaE18133F9629Ee220Cf35d06951bccEf51", + "BigBang_Implementation": "0x8F7FaFBabdAD34Fb88785E4a07fef68F2aDb470E" } } diff --git a/pkgs/contract/outputs/contracts-sepolia.json b/pkgs/contract/outputs/contracts-sepolia.json index 9a7763eb..2e2286f4 100644 --- a/pkgs/contract/outputs/contracts-sepolia.json +++ 
b/pkgs/contract/outputs/contracts-sepolia.json @@ -3,18 +3,18 @@ "Hats": "0x3bc1A0Ad72417f2d411118085256fC53CBdDd137", "HatsModuleFactory": "0x0a3f85fa597B6a967271286aA0724811acDF5CD9", "PullSplitsFactory": "0x80f1B766817D04870f115fEBbcCADF8DBF75E017", - "BigBang": "\"0xA764F51dcaE050DBFf8404D0bf5C089E4Fb85AdD\"", - "FractionToken": "\"0xd6031f9543bEB0963e32CA2AC474de69D0515059\"", - "SplitsCreatorFactory": "\"0xBBe355703d205dd1e0086fa836D01825601C1DDf\"", - "SplitsCreator": "\"0xC0D7B4E0485d71aDA9ffF9365E4e1938F8195dDa\"", - "HatsTimeFrameModule": "\"0xadC508C3BA5eD8b3A6D06e2eC8D17162d2DD6F4f\"", - "ProxyAdmin": "\"0xdAEC7C851DA8E9b041e4592fdCF19843Bc1f8bE8\"", - "ThanksTokenFactory_Implementation": "\"0xE0A3E42D126e545303A9C92423D4847D2ff68913\"", - "ThanksTokenFactory": "\"0x6b36bDEA3E0495e5f5bEBEE17c8170cBc93b7238\"" + "BigBang": "0xA764F51dcaE050DBFf8404D0bf5C089E4Fb85AdD", + "FractionToken": "0xd6031f9543bEB0963e32CA2AC474de69D0515059", + "SplitsCreatorFactory": "0xBBe355703d205dd1e0086fa836D01825601C1DDf", + "SplitsCreator": "0xC0D7B4E0485d71aDA9ffF9365E4e1938F8195dDa", + "HatsTimeFrameModule": "0xadC508C3BA5eD8b3A6D06e2eC8D17162d2DD6F4f", + "ProxyAdmin": "0xdAEC7C851DA8E9b041e4592fdCF19843Bc1f8bE8", + "ThanksTokenFactory_Implementation": "0xE0A3E42D126e545303A9C92423D4847D2ff68913", + "ThanksTokenFactory": "0x6b36bDEA3E0495e5f5bEBEE17c8170cBc93b7238" }, "implementations": { - "FractionToken_Implementation": "\"0x6d701256205019e40cE02e13D799ed2cd3BBE8e8\"", - "SplitsCreatorFactory_Implementation": "\"0x95394Da0460DCF87fC76F5b868D9251bf9738553\"", - "BigBang_Implementation": "\"0x1B956C5af5B5CA963F3005Dd6DB349713b7e9bbE\"" + "FractionToken_Implementation": "0x6d701256205019e40cE02e13D799ed2cd3BBE8e8", + "SplitsCreatorFactory_Implementation": "0x95394Da0460DCF87fC76F5b868D9251bf9738553", + "BigBang_Implementation": "0x1B956C5af5B5CA963F3005Dd6DB349713b7e9bbE" } } diff --git a/pkgs/contract/package.json b/pkgs/contract/package.json index 2d265db8..e2531499 
100644 --- a/pkgs/contract/package.json +++ b/pkgs/contract/package.json @@ -10,8 +10,7 @@ "test": "npx hardhat test", "coverage": "SOLIDITY_COVERAGE=true npx hardhat coverage", "lint": "solhint --fix --noPrompt --save 'contracts/**/*.sol'", - "deploy:Lock": "npx hardhat ignition deploy ./ignition/modules/Lock.ts", - "deploy:all": "npx hardhat run scripts/deploy/all.ts", + "deploy:all": "npx hardhat run scripts/deploy/create2.ts", "local": "npx hardhat node", "getBalance": "npx hardhat getBalance", "getChainInfo": "npx hardhat getChainInfo", @@ -22,7 +21,8 @@ "upgrade:BigBang": "npx hardhat run scripts/upgrade/BigBang.ts", "bigbang": "npx hardhat bigbang", "getWoreTime": "npx hardhat getWoreTime", - "mintHat": "npx hardhat mintHat" + "mintHat": "npx hardhat mintHat", + "batchMintHat": "npx hardhat batchMintHat" }, "devDependencies": { "@nomicfoundation/hardhat-ethers": "^3.0.0", diff --git a/pkgs/contract/scripts/deploy/all.ts b/pkgs/contract/scripts/deploy/all.ts deleted file mode 100644 index 6fd8095a..00000000 --- a/pkgs/contract/scripts/deploy/all.ts +++ /dev/null @@ -1,107 +0,0 @@ -import * as dotenv from "dotenv"; -import { network } from "hardhat"; -import { type Address, zeroAddress } from "viem"; -import { deployBigBang } from "../../helpers/deploy/BigBang"; -import { deployFractionToken } from "../../helpers/deploy/FractionToken"; -import { - deployHatsHatCreatorModule, - deployHatsTimeFrameModule, -} from "../../helpers/deploy/Hats"; -import { - deploySplitsCreator, - deploySplitsCreatorFactory, -} from "../../helpers/deploy/Splits"; -import { - loadDeployedContractAddresses, - writeContractAddress, -} from "../../helpers/deploy/contractsJsonHelper"; - -dotenv.config(); - -/** - * Deploy all contracts - * @returns - */ -const deployAll = async () => { - console.log( - "##################################### [Deploy START] #####################################", - ); - - // Hats HatsModuleFactory PullSplitsFactoryコントラクトの各アドレスをjsonファイルから取得してくる。 - const { - 
contracts: { Hats, HatsModuleFactory, PullSplitsFactory }, - } = loadDeployedContractAddresses(network.name); - - const { HatsTimeFrameModule } = await deployHatsTimeFrameModule(); - const { HatsHatCreatorModule } = await deployHatsHatCreatorModule( - "0x0000000000000000000000000000000000000001", // zero address 以外のアドレスを仮に渡す - ); - - const { FractionToken } = await deployFractionToken( - "", - 10000n, - Hats as Address, - ); - - const { SplitsCreator } = await deploySplitsCreator(); - - const { SplitsCreatorFactory } = await deploySplitsCreatorFactory( - SplitsCreator.address, - ); - - const { BigBang } = await deployBigBang({ - hatsContractAddress: Hats as Address, - hatsModuleFacotryAddress: HatsModuleFactory as Address, - hatsTimeFrameModule_impl: HatsTimeFrameModule.address, - hatsHatCreatorModule_impl: HatsHatCreatorModule.address, - splitsCreatorFactoryAddress: SplitsCreatorFactory.address, - splitsFactoryV2Address: PullSplitsFactory as Address, - fractionTokenAddress: FractionToken.address, - }); - - console.log("BigBang deployed at", BigBang.address); - console.log("FractionToken deployed at", FractionToken.address); - console.log("SplitsCreatorFactory deployed at", SplitsCreatorFactory.address); - console.log("SplitsCreator deployed at", SplitsCreator.address); - console.log("HatsTimeFrameModule deployed at", HatsTimeFrameModule.address); - - // デプロイしたアドレスをjsonファイルに保存する。 - writeContractAddress({ - group: "contracts", - name: "BigBang", - value: BigBang.address, - network: network.name, - }); - writeContractAddress({ - group: "contracts", - name: "FractionToken", - value: FractionToken.address, - network: network.name, - }); - writeContractAddress({ - group: "contracts", - name: "SplitsCreatorFactory", - value: SplitsCreatorFactory.address, - network: network.name, - }); - writeContractAddress({ - group: "contracts", - name: "SplitsCreator", - value: SplitsCreator.address, - network: network.name, - }); - writeContractAddress({ - group: "contracts", - name: 
"HatsTimeFrameModule", - value: HatsTimeFrameModule.address, - network: network.name, - }); - - console.log( - "##################################### [Deploy END] #####################################", - ); - - return; -}; - -deployAll(); diff --git a/pkgs/contract/scripts/upgrade/hatsTimeFrameModule.ts b/pkgs/contract/scripts/upgrade/hatsTimeFrameModule.ts index bad8c6ed..7360dedb 100644 --- a/pkgs/contract/scripts/upgrade/hatsTimeFrameModule.ts +++ b/pkgs/contract/scripts/upgrade/hatsTimeFrameModule.ts @@ -1,15 +1,18 @@ import { network, viem } from "hardhat"; import { deployHatsTimeFrameModule } from "../../helpers/deploy/Hats"; +import { loadDeployedContractAddresses } from "../../helpers/deploy/contractsJsonHelper"; const upgrade = async () => { const module = await deployHatsTimeFrameModule( "0x0000000000000000000000000000000000000001", ); - const bigBang = await viem.getContractAt( - "BigBang", - "0x3E70d10aCdcC14B6C31DA26DcC195a6EDf1C2c16", - ); + // Load BigBang address from outputs JSON file + const { + contracts: { BigBang }, + } = loadDeployedContractAddresses(network.name); + + const bigBang = await viem.getContractAt("BigBang", BigBang); await bigBang.write.setHatsTimeFrameModuleImpl([ module.HatsTimeFrameModule.address, diff --git a/pkgs/contract/tasks/BigBang/bigbang.ts b/pkgs/contract/tasks/BigBang/bigbang.ts index b8d8d704..d07038ad 100644 --- a/pkgs/contract/tasks/BigBang/bigbang.ts +++ b/pkgs/contract/tasks/BigBang/bigbang.ts @@ -38,23 +38,118 @@ task("bigbang", "bigbang") // create BigBang instance const bigbang = await hre.viem.getContractAt("BigBang", BigBang); - const address = bobWalletClient.account?.address; - if (!address) { - throw new Error("Wallet client account address is undefined"); - } + console.log("Calling bigbang with parameters:"); + console.log(` owner: ${taskArgs.owner}`); + console.log(` tophatdetails: ${taskArgs.tophatdetails}`); + console.log(` tophatimageuri: ${taskArgs.tophatimageuri}`); + console.log(` 
hatterhatdetails: ${taskArgs.hatterhatdetails}`); + console.log(` hatterhatimageuri: ${taskArgs.hatterhatimageuri}`); + console.log(` memberhatdetails: ${taskArgs.memberhatdetails}`); + console.log(` memberhatimageuri: ${taskArgs.memberhatimageuri}`); + + try { + // call bigbang method with explicit gas limit + const tx = await bigbang.write.bigbang( + [ + taskArgs.owner as `0x${string}`, + taskArgs.tophatdetails, + taskArgs.tophatimageuri, + taskArgs.hatterhatdetails, + taskArgs.hatterhatimageuri, + taskArgs.memberhatdetails, + taskArgs.memberhatimageuri, + ], + { + gas: 15000000n, // 15M gas limit - much higher + }, + ); + + console.log(`Transaction hash: ${tx}`); + console.log("Waiting for transaction confirmation..."); + + // Get transaction receipt + const publicClient = await hre.viem.getPublicClient(); + const receipt = await publicClient.waitForTransactionReceipt({ + hash: tx, + confirmations: 1, + }); + + console.log(`Transaction confirmed in block: ${receipt.blockNumber}`); + console.log(`Gas used: ${receipt.gasUsed}`); + console.log(`Status: ${receipt.status}`); - // call bigbang method - const tx = await bigbang.write.bigbang([ - address, - taskArgs.tophatdetails, - taskArgs.tophatimageuri, - taskArgs.hatterhatdetails, - taskArgs.hatterhatimageuri, - taskArgs.memberhatdetails, - taskArgs.memberhatimageuri, - ]); - - console.log(`tx: ${tx}`); + // Check if transaction was successful + if (receipt.status === "reverted") { + console.log("❌ Transaction was reverted!"); + throw new Error("Transaction reverted"); + } + + console.log("✅ Transaction successful!"); + const executedEvents = await publicClient.getLogs({ + address: BigBang, + event: { + type: "event", + name: "Executed", + inputs: [ + { name: "creator", type: "address", indexed: true }, + { name: "owner", type: "address", indexed: true }, + { name: "topHatId", type: "uint256", indexed: true }, + { name: "hatterHatId", type: "uint256", indexed: false }, + { name: "memberHatId", type: "uint256", 
indexed: false }, + { name: "operatorHatId", type: "uint256", indexed: false }, + { name: "creatorHatId", type: "uint256", indexed: false }, + { name: "minterHatId", type: "uint256", indexed: false }, + { name: "hatsTimeFrameModule", type: "address", indexed: false }, + { name: "hatsHatCreatorModule", type: "address", indexed: false }, + { + name: "hatsFractionTokenModule", + type: "address", + indexed: false, + }, + { name: "splitCreator", type: "address", indexed: false }, + { name: "thanksToken", type: "address", indexed: false }, + ], + }, + fromBlock: receipt.blockNumber, + toBlock: receipt.blockNumber, + }); + + if (executedEvents.length > 0) { + console.log("\n=== BigBang Execution Results ==="); + const event = executedEvents[0]; + console.log(`Creator: ${event.args.creator}`); + console.log(`Owner: ${event.args.owner}`); + console.log(`TopHat ID: ${event.args.topHatId}`); + console.log(`HatterHat ID: ${event.args.hatterHatId}`); + console.log(`MemberHat ID: ${event.args.memberHatId}`); + console.log(`OperatorHat ID: ${event.args.operatorHatId}`); + console.log(`CreatorHat ID: ${event.args.creatorHatId}`); + console.log(`MinterHat ID: ${event.args.minterHatId}`); + console.log(`HatsTimeFrameModule: ${event.args.hatsTimeFrameModule}`); + console.log( + `HatsHatCreatorModule: ${event.args.hatsHatCreatorModule}`, + ); + console.log( + `HatsFractionTokenModule: ${event.args.hatsFractionTokenModule}`, + ); + console.log(`SplitCreator: ${event.args.splitCreator}`); + console.log(`ThanksToken: ${event.args.thanksToken}`); + + // Save important IDs for future reference + console.log("\n=== For batchMintHat Testing ==="); + console.log(`Use MinterHat ID: ${event.args.minterHatId}`); + console.log(`Target MemberHat ID: ${event.args.memberHatId}`); + console.log( + `HatsTimeFrameModule Address: ${event.args.hatsTimeFrameModule}`, + ); + } + + console.log("BigBang executed successfully!"); + } catch (error) { + console.error("An unexpected error occurred:"); + 
console.error(error); + throw error; + } console.log( "################################### [END] ###################################", diff --git a/pkgs/contract/tasks/HatsTimeFrameModule/batchMintHat.ts b/pkgs/contract/tasks/HatsTimeFrameModule/batchMintHat.ts new file mode 100644 index 00000000..cef9373b --- /dev/null +++ b/pkgs/contract/tasks/HatsTimeFrameModule/batchMintHat.ts @@ -0,0 +1,223 @@ +import * as fs from "node:fs"; +import * as path from "node:path"; +import { task } from "hardhat/config"; +import type { HardhatRuntimeEnvironment } from "hardhat/types"; +import { isAddress } from "viem"; +import { loadDeployedContractAddresses } from "../../helpers/deploy/contractsJsonHelper"; + +interface BatchMintRecord { + address: `0x${string}`; + timestamp?: bigint; +} + +/** + * Parse CSV file and return batch mint records + * Expected CSV format: address,timestamp (header required) + */ +function parseCsvFile(filePath: string): BatchMintRecord[] { + const csvContent = fs.readFileSync(filePath, "utf-8"); + const lines = csvContent.split("\n").filter((line) => line.trim()); + + if (lines.length < 2) { + throw new Error("CSV file must have a header and at least one data row"); + } + + const header = lines[0].split(",").map((col) => col.trim()); + const addressIndex = header.findIndex( + (col) => col.toLowerCase() === "address", + ); + const timestampIndex = header.findIndex( + (col) => col.toLowerCase() === "timestamp", + ); + + if (addressIndex === -1) { + throw new Error("CSV file must have an 'address' column"); + } + + const batchRecords: BatchMintRecord[] = []; + + for (let i = 1; i < lines.length; i++) { + const columns = lines[i].split(",").map((col) => col.trim()); + + if (columns.length <= addressIndex) { + continue; // Skip invalid rows + } + + // Address validation + const address = columns[addressIndex]?.toLowerCase(); + if (!address || !isAddress(address)) { + throw new Error( + `Invalid address at row ${i + 1}: ${columns[addressIndex]}`, + ); + } + 
+ // Timestamp parsing (optional) + let timestamp: bigint | undefined; + if (timestampIndex !== -1 && columns[timestampIndex]) { + const timestampNum = Number(columns[timestampIndex]); + if (Number.isNaN(timestampNum) || timestampNum < 0) { + throw new Error( + `Invalid timestamp at row ${i + 1}: ${columns[timestampIndex]}`, + ); + } + timestamp = BigInt(timestampNum); + } + + batchRecords.push({ + address: address as `0x${string}`, + timestamp, + }); + } + + return batchRecords; +} + +/** + * 【Task】call batchMintHat of HatsTimeFrameModule + * Usage: + * - From CSV file: npx hardhat batchMintHat --hatid 123 --csv ./data/example-wearers.csv --network sepolia + * - From addresses: npx hardhat batchMintHat --hatid 123 --addresses "0x123...,0x456..." --network sepolia + */ +task("batchMintHat", "Batch mint hats to multiple addresses") + .addParam("hatid", "Hat ID to mint") + .addOptionalParam( + "csv", + "Path to CSV file containing addresses and optional timestamps", + ) + .addOptionalParam("addresses", "Comma-separated list of addresses") + .addOptionalParam("batchsize", "Maximum batch size (default: 50)", "50") + .addOptionalParam( + "module", + "HatsTimeFrameModule instance address (clone). 
If omitted, uses the address from outputs", + ) + .setAction( + async ( + taskArgs: { + hatid: string; + csv?: string; + addresses?: string; + batchsize: string; + module?: `0x${string}`; + }, + hre: HardhatRuntimeEnvironment, + ) => { + console.log( + "################################### [START] batchMintHat ###################################", + ); + + const hatId = BigInt(taskArgs.hatid); + const batchSize = Number.parseInt(taskArgs.batchsize); + + if (batchSize <= 0 || batchSize > 100) { + throw new Error("Batch size must be between 1 and 100"); + } + + // Parse input data + let batchRecords: BatchMintRecord[] = []; + + if (taskArgs.csv) { + console.log(`Reading from CSV file: ${taskArgs.csv}`); + const csvPath = path.resolve(taskArgs.csv); + if (!fs.existsSync(csvPath)) { + throw new Error(`CSV file not found: ${csvPath}`); + } + batchRecords = parseCsvFile(csvPath); + } else if (taskArgs.addresses) { + console.log("Processing addresses from command line"); + const addresses = taskArgs.addresses + .split(",") + .map((addr) => addr.trim()); + for (const address of addresses) { + if (!isAddress(address)) { + throw new Error(`Invalid address: ${address}`); + } + batchRecords.push({ + address: address as `0x${string}`, + }); + } + } else { + throw new Error("Either --csv or --addresses parameter is required"); + } + + if (batchRecords.length === 0) { + throw new Error("No valid addresses found"); + } + + console.log(`Total addresses to process: ${batchRecords.length}`); + + // Load contract + const { + contracts: { HatsTimeFrameModule }, + } = loadDeployedContractAddresses(hre.network.name); + + const moduleAddress = (taskArgs.module || + HatsTimeFrameModule) as `0x${string}`; + const hatsTimeFrameModule = await hre.viem.getContractAt( + "HatsTimeFrameModule", + moduleAddress, + ); + + // Type assertion for new method that may not be in type definitions yet + type ExtendedContract = typeof hatsTimeFrameModule & { + write: typeof hatsTimeFrameModule.write & { + 
batchMintHat: ( + args: [bigint, `0x${string}`[], bigint[]], + ) => Promise<`0x${string}`>; + }; + }; + const contractWithBatchMint = hatsTimeFrameModule as ExtendedContract; + + // Process in batches + const totalBatches = Math.ceil(batchRecords.length / batchSize); + console.log( + `Processing in ${totalBatches} batch(es) of max ${batchSize} addresses each`, + ); + + for (let i = 0; i < totalBatches; i++) { + const startIndex = i * batchSize; + const endIndex = Math.min(startIndex + batchSize, batchRecords.length); + const batch = batchRecords.slice(startIndex, endIndex); + + console.log( + `\n--- Batch ${i + 1}/${totalBatches} (${batch.length} addresses) ---`, + ); + + const wearers = batch.map((record) => record.address); + const times = batch.map((record) => record.timestamp || 0n); + + try { + console.log("Addresses:", wearers); + console.log( + "Timestamps:", + times.map((t) => t.toString()), + ); + + const tx = await contractWithBatchMint.write.batchMintHat([ + hatId, + wearers, + times, + ]); + + console.log(`✅ Batch ${i + 1} transaction: ${tx}`); + console.log(`module: ${moduleAddress}`); + + // Wait for transaction confirmation + const publicClient = await hre.viem.getPublicClient(); + const receipt = await publicClient.waitForTransactionReceipt({ + hash: tx, + }); + + console.log( + `✅ Batch ${i + 1} confirmed in block: ${receipt.blockNumber}`, + ); + } catch (error) { + console.error(`❌ Batch ${i + 1} failed:`, error); + throw error; + } + } + + console.log( + "\n################################### [END] batchMintHat ###################################", + ); + }, + ); diff --git a/pkgs/contract/tasks/HatsTimeFrameModule/mintHat.ts b/pkgs/contract/tasks/HatsTimeFrameModule/mintHat.ts index a8c8b952..d8575f3a 100644 --- a/pkgs/contract/tasks/HatsTimeFrameModule/mintHat.ts +++ b/pkgs/contract/tasks/HatsTimeFrameModule/mintHat.ts @@ -8,11 +8,16 @@ import { loadDeployedContractAddresses } from "../../helpers/deploy/contractsJso task("mintHat", "mintHat") 
.addParam("hatid", "hatid") .addParam("wearer", "address of wearer") + .addOptionalParam( + "module", + "HatsTimeFrameModule instance address (clone). If omitted, uses the address from outputs", + ) .setAction( async ( taskArgs: { hatid: bigint; wearer: `0x${string}`; + module?: `0x${string}`; }, hre: HardhatRuntimeEnvironment, ) => { @@ -26,9 +31,11 @@ task("mintHat", "mintHat") } = loadDeployedContractAddresses(hre.network.name); // create HatsTimeFrameModule instance + const moduleAddress = (taskArgs.module || + HatsTimeFrameModule) as `0x${string}`; const hatsTimeFrameModuleByBigBang = await hre.viem.getContractAt( "HatsTimeFrameModule", - HatsTimeFrameModule, + moduleAddress, ); // call mintHat method @@ -39,6 +46,7 @@ task("mintHat", "mintHat") ]); console.log(`tx: ${tx}`); + console.log(`module: ${moduleAddress}`); console.log( "################################### [END] ###################################", diff --git a/pkgs/contract/tasks/index.ts b/pkgs/contract/tasks/index.ts index 0202da48..3c1934a9 100644 --- a/pkgs/contract/tasks/index.ts +++ b/pkgs/contract/tasks/index.ts @@ -1,4 +1,5 @@ import * as bigbang from "./BigBang/bigbang"; +import * as batchMintHat from "./HatsTimeFrameModule/batchMintHat"; import * as getWoreTime from "./HatsTimeFrameModule/getWoreTime"; import * as mintHat from "./HatsTimeFrameModule/mintHat"; import * as registerSubdomain from "./ens/registerSubdomain"; @@ -8,6 +9,7 @@ import * as getContractAddress from "./utils/getContractAddress"; import * as resetContractAddressesJson from "./utils/resetContractAddressesJson"; export { + batchMintHat, bigbang, getBalance, getChainInfo, diff --git a/pkgs/contract/test/BigBang.ts b/pkgs/contract/test/BigBang.ts index 3e6306d4..c7050dbc 100644 --- a/pkgs/contract/test/BigBang.ts +++ b/pkgs/contract/test/BigBang.ts @@ -1,30 +1,22 @@ import { expect } from "chai"; import { viem } from "hardhat"; -import { - type PublicClient, - type WalletClient, - Address, - decodeEventLog, - zeroAddress, 
-} from "viem"; +import { type PublicClient, type WalletClient, decodeEventLog } from "viem"; import { type BigBang, deployBigBang } from "../helpers/deploy/BigBang"; import { - type ThanksToken, - type ThanksTokenFactory, - deployThanksToken, - deployThanksTokenFactory, -} from "../helpers/deploy/ThanksToken"; + Create2Deployer, + deployCreate2Deployer, +} from "../helpers/deploy/Create2Factory"; import { type Hats, + type HatsFractionTokenModule, + type HatsHatCreatorModule, type HatsModuleFactory, type HatsTimeFrameModule, - type HatsHatCreatorModule, - type HatsFractionTokenModule, + deployHatsFractionTokenModule, + deployHatsHatCreatorModule, deployHatsModuleFactory, deployHatsProtocol, deployHatsTimeFrameModule, - deployHatsHatCreatorModule, - deployHatsFractionTokenModule, } from "../helpers/deploy/Hats"; import { type PullSplitsFactory, @@ -36,11 +28,12 @@ import { deploySplitsCreatorFactory, deploySplitsProtocol, } from "../helpers/deploy/Splits"; -import { upgradeBigBang } from "../helpers/upgrade/bigbang"; import { - Create2Deployer, - deployCreate2Deployer, -} from "../helpers/deploy/Create2Factory"; + type ThanksToken, + type ThanksTokenFactory, + deployThanksToken, + deployThanksTokenFactory, +} from "../helpers/deploy/ThanksToken"; describe("BigBang", () => { let Create2Deployer: Create2Deployer; @@ -153,6 +146,24 @@ describe("BigBang", () => { }); it("should execute bigbang", async () => { + // First deploy BigBang for this test if not already deployed + if (!BigBang) { + const { BigBang: _BigBang } = await deployBigBang( + { + hatsContractAddress: Hats.address, + hatsModuleFacotryAddress: HatsModuleFactory.address, + hatsTimeFrameModule_impl: HatsTimeFrameModule_IMPL.address, + hatsHatCreatorModule_impl: HatsHatCreatorModule_IMPL.address, + hatsFractionTokenModule_impl: HatsFractionTokenModule_IMPL.address, + splitsCreatorFactoryAddress: SplitsCreatorFactory.address, + splitsFactoryV2Address: PullSplitsFactory.address, + thanksTokenFactoryAddress: 
ThanksTokenFactory.address, + }, + Create2Deployer.address, + ); + BigBang = _BigBang; + } + // SplitsCreatorFactoryにBigBangアドレスをセット await SplitsCreatorFactory.write.setBigBang([BigBang.address]); diff --git a/pkgs/contract/test/HatsTimeFrameModule.ts b/pkgs/contract/test/HatsTimeFrameModule.ts index d6166927..a5d6242e 100644 --- a/pkgs/contract/test/HatsTimeFrameModule.ts +++ b/pkgs/contract/test/HatsTimeFrameModule.ts @@ -3,6 +3,10 @@ import { expect } from "chai"; import { viem } from "hardhat"; import type { Address, PublicClient, WalletClient } from "viem"; import { decodeEventLog, encodeAbiParameters } from "viem"; +import { + Create2Deployer, + deployCreate2Deployer, +} from "../helpers/deploy/Create2Factory"; import { type Hats, type HatsModuleFactory, @@ -11,10 +15,6 @@ import { deployHatsProtocol, deployHatsTimeFrameModule, } from "../helpers/deploy/Hats"; -import { - Create2Deployer, - deployCreate2Deployer, -} from "../helpers/deploy/Create2Factory"; describe("HatsTimeFrameModule", () => { let Create2Deployer: Create2Deployer; @@ -287,4 +287,226 @@ describe("HatsTimeFrameModule", () => { expect(woreTime).to.equal(0n); }); + + describe("batchMintHat", () => { + it("should mint multiple hats successfully", async () => { + if (!roleHatId) { + throw new Error("Role hat ID is undefined"); + } + + // Create new hat for this test to avoid conflicts + const testRoleHatId = await createHat( + publicClient, + roleHatId, + "Test Batch Role Hat", + ); + + const wearers = [address1Validated, address2Validated]; + const times = [0n, 0n]; // Use current timestamp + + const initialTime = BigInt(await time.latest()); + + await HatsTimeFrameModule.write.batchMintHat([ + testRoleHatId, + wearers, + times, + ]); + + const afterMintTime = BigInt(await time.latest()); + + // Check both addresses received hats + expect( + await Hats.read.balanceOf([address1Validated, testRoleHatId]), + ).to.equal(1n); + expect( + await Hats.read.balanceOf([address2Validated, testRoleHatId]), 
+ ).to.equal(1n); + + // Check wore times are set correctly + const woreTime1 = await HatsTimeFrameModule.read.getWoreTime([ + address1Validated, + testRoleHatId, + ]); + const woreTime2 = await HatsTimeFrameModule.read.getWoreTime([ + address2Validated, + testRoleHatId, + ]); + + expect(Number(woreTime1)).to.be.greaterThanOrEqual(Number(initialTime)); + expect(Number(woreTime1)).to.be.lessThanOrEqual(Number(afterMintTime)); + expect(Number(woreTime2)).to.be.greaterThanOrEqual(Number(initialTime)); + expect(Number(woreTime2)).to.be.lessThanOrEqual(Number(afterMintTime)); + + // Check both are active + expect( + await HatsTimeFrameModule.read.isActive([ + testRoleHatId, + address1Validated, + ]), + ).to.be.true; + expect( + await HatsTimeFrameModule.read.isActive([ + testRoleHatId, + address2Validated, + ]), + ).to.be.true; + }); + + it("should revert if not authorized", async () => { + if (!roleHatId) { + throw new Error("Role hat ID is undefined"); + } + + // Create new hat for this test + const testRoleHatId = await createHat( + publicClient, + roleHatId, + "Test Auth Role Hat", + ); + + const wearers = [address1Validated]; + const times = [0n]; + + // Switch to unauthorized account + const HatsTimeFrameModuleUnauthorized = await viem.getContractAt( + "HatsTimeFrameModule", + HatsTimeFrameModule.address, + { client: { wallet: address2 } }, + ); + + await expect( + HatsTimeFrameModuleUnauthorized.write.batchMintHat([ + testRoleHatId, + wearers, + times, + ]), + ).to.be.rejectedWith("Not authorized"); + }); + + it("should revert if array lengths mismatch", async () => { + if (!roleHatId) { + throw new Error("Role hat ID is undefined"); + } + + // Create new hat for this test + const testRoleHatId = await createHat( + publicClient, + roleHatId, + "Test Array Role Hat", + ); + + const wearers = [address1Validated, address2Validated]; + const times = [0n]; // Mismatched length + + await expect( + HatsTimeFrameModule.write.batchMintHat([testRoleHatId, wearers, times]), 
+ ).to.be.rejectedWith("Array length mismatch"); + }); + + it("should revert if wearers array is empty", async () => { + if (!roleHatId) { + throw new Error("Role hat ID is undefined"); + } + + // Create new hat for this test + const testRoleHatId = await createHat( + publicClient, + roleHatId, + "Test Empty Role Hat", + ); + + const wearers: Address[] = []; + const times: bigint[] = []; + + await expect( + HatsTimeFrameModule.write.batchMintHat([testRoleHatId, wearers, times]), + ).to.be.rejectedWith("Empty wearers array"); + }); + + it("should revert if batch size exceeds limit", async () => { + if (!roleHatId) { + throw new Error("Role hat ID is undefined"); + } + + // Create new hat for this test + const testRoleHatId = await createHat( + publicClient, + roleHatId, + "Test Large Role Hat", + ); + + // Create arrays with 101 items (exceeds limit of 100) + const wearers = Array(101).fill(address1Validated) as Address[]; + const times = Array(101).fill(0n) as bigint[]; + + await expect( + HatsTimeFrameModule.write.batchMintHat([testRoleHatId, wearers, times]), + ).to.be.rejectedWith("Batch size too large"); + }); + + it("should revert if any hat already minted", async () => { + if (!roleHatId) { + throw new Error("Role hat ID is undefined"); + } + + // Create new hat for this test + const testRoleHatId = await createHat( + publicClient, + roleHatId, + "Test Existing Role Hat", + ); + + // First mint a hat to address1 + await HatsTimeFrameModule.write.mintHat([ + testRoleHatId, + address1Validated, + 0n, + ]); + + // Try to batch mint including the already minted address + const wearers = [address1Validated, address2Validated]; + const times = [0n, 0n]; + + await expect( + HatsTimeFrameModule.write.batchMintHat([testRoleHatId, wearers, times]), + ).to.be.rejectedWith("Hat already minted"); + }); + + it("should handle mixed timestamps correctly", async () => { + if (!roleHatId) { + throw new Error("Role hat ID is undefined"); + } + + // Create new hat for this test 
+ const newRoleHatId = await createHat( + publicClient, + roleHatId, + "New Role Hat", + ); + + const currentTime = BigInt(await time.latest()); + const pastTime = currentTime - 1000n; + + const wearers = [address1Validated, address2Validated]; + const times = [pastTime, 0n]; // One past time, one current time + + await HatsTimeFrameModule.write.batchMintHat([ + newRoleHatId, + wearers, + times, + ]); + + const woreTime1 = await HatsTimeFrameModule.read.getWoreTime([ + address1Validated, + newRoleHatId, + ]); + const woreTime2 = await HatsTimeFrameModule.read.getWoreTime([ + address2Validated, + newRoleHatId, + ]); + + expect(woreTime1).to.equal(pastTime); + expect(Number(woreTime2)).to.be.greaterThan(Number(currentTime)); + }); + }); }); diff --git a/pkgs/frontend/app/components/Header.tsx b/pkgs/frontend/app/components/Header.tsx index ec2f75d0..43b8023a 100644 --- a/pkgs/frontend/app/components/Header.tsx +++ b/pkgs/frontend/app/components/Header.tsx @@ -76,14 +76,57 @@ export const Header = () => { return avatar ? 
ipfs2https(avatar) : undefined; }, [identity]); - const handleLogout = () => { - if (isSmartWallet) { - logout(); - } else { - if (wallets.find((w) => w.connectorType === "injected")) { - alert("ウォレット拡張機能から切断してください。"); + const handleLogout = async () => { + try { + if (isSmartWallet) { + // スマートウォレットの場合、Privyのlogoutを使用 + await logout(); } else { - Promise.all(wallets.map((wallet) => wallet.disconnect())); + // 外部ウォレット(MetaMaskなど)の場合 + const hasInjectedWallet = wallets.some( + (w) => w.connectorType === "injected", + ); + + if (hasInjectedWallet) { + // MetaMaskの権限を無効化 + try { + if (window.ethereum) { + await window.ethereum.request({ + method: "wallet_revokePermissions", + params: [{ eth_accounts: {} }], + }); + } + } catch (revokeError) { + console.warn("Failed to revoke MetaMask permissions:", revokeError); + } + } else { + // その他の外部ウォレットの場合 + // Privyのlogoutを実行してからウォレット切断を試行 + await logout(); + + // 切断可能なウォレットのみ切断 + for (const wallet of wallets) { + if (wallet.connectorType !== "injected") { + try { + wallet.disconnect(); + } catch (error) { + console.warn( + "Failed to disconnect wallet:", + wallet.address, + error, + ); + } + } + } + } + } + } catch (error) { + console.error("Logout failed:", error); + // エラーが発生した場合のフォールバック処理 + try { + await logout(); + } catch (logoutError) { + console.error("Fallback logout also failed:", logoutError); } } }; diff --git a/pkgs/frontend/app/components/SwitchNetwork.tsx b/pkgs/frontend/app/components/SwitchNetwork.tsx index 20db900c..ffb8945e 100644 --- a/pkgs/frontend/app/components/SwitchNetwork.tsx +++ b/pkgs/frontend/app/components/SwitchNetwork.tsx @@ -1,22 +1,115 @@ +import { Box, Button, Stack, Text } from "@chakra-ui/react"; import { currentChain } from "hooks/useViem"; import { useActiveWallet } from "hooks/useWallet"; -import { type FC, useEffect } from "react"; +import { type FC, useEffect, useState } from "react"; +import { + DialogBody, + DialogContent, + DialogFooter, + DialogHeader, + DialogRoot, + DialogTitle, 
+} from "./ui/dialog"; export const SwitchNetwork: FC = () => { const { connectedWallet } = useActiveWallet(); + const [isOpen, setIsOpen] = useState(false); + const [isSwitching, setIsSwitching] = useState(false); + // チェーン不一致の検出 useEffect(() => { - const switchChain = async () => { - if ( - connectedWallet && - Number(connectedWallet.chainId) !== currentChain.id - ) { - await connectedWallet.switchChain(currentChain.id); - } - }; - - switchChain(); + if (connectedWallet?.chainId) { + const isChainMismatch = + Number(connectedWallet.chainId.replace("eip155:", "")) !== + currentChain.id; + setIsOpen(isChainMismatch); + } else { + setIsOpen(false); + } }, [connectedWallet]); - return <>; + // チェーン切り替え処理 + const handleSwitchChain = async () => { + if (!connectedWallet) return; + + try { + setIsSwitching(true); + await connectedWallet.switchChain(currentChain.id); + setIsOpen(false); + } catch (error) { + console.error("Failed to switch chain:", error); + // エラー時はModalを開いたままにする + } finally { + setIsSwitching(false); + } + }; + + return ( + + + + + ネットワークの切り替えが必要です + + + + + + + 現在のネットワークは対応していません。以下のネットワークに切り替えてください。 + + + + + 現在のネットワーク + + + {connectedWallet?.chainId + ? `Chain ID: ${connectedWallet.chainId.replace("eip155:", "")}` + : "未接続"} + + + + + + 必要なネットワーク + + + {currentChain.name} (Chain ID: {currentChain.id}) + + + + + + + + + + + ); };