diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 0bf52e1c..dcbbeb48 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -18,18 +18,45 @@ jobs: contents: write steps: - - name: Determine tag name + - name: Determine tag name and workflow branch id: tag + env: + INPUT_TAG: ${{ github.event.inputs.tag }} run: | if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then - TAG="${{ github.event.inputs.tag }}" + TAG="$INPUT_TAG" + # For workflow_dispatch, use the branch where workflow is running + WORKFLOW_REF="${{ github.ref }}" else TAG="${GITHUB_REF#refs/tags/}" + # For tag pushes, use default branch (main) to get script + # The script must exist in the default branch for workflow_dispatch to work anyway + WORKFLOW_REF="main" fi echo "tag=$TAG" >> $GITHUB_OUTPUT + echo "workflow_ref=$WORKFLOW_REF" >> $GITHUB_OUTPUT echo "Determined tag: $TAG" + echo "Workflow ref for script: $WORKFLOW_REF" - - name: Checkout repository + - name: Checkout workflow branch (to get script) + uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + ref: ${{ steps.tag.outputs.workflow_ref }} + + - name: Save extraction script + run: | + # Save the script to a temp location that won't be affected by tag checkout + mkdir -p /tmp/workflow-scripts + if [ -f "scripts/extract-artifacts.js" ]; then + cp scripts/extract-artifacts.js /tmp/workflow-scripts/extract-artifacts.js + echo "✓ Saved script from workflow branch" + else + echo "Warning: scripts/extract-artifacts.js not found in workflow branch" + fi + + - name: Checkout code at tag uses: actions/checkout@v4 with: submodules: recursive @@ -39,33 +66,25 @@ jobs: - name: Install Foundry uses: foundry-rs/foundry-toolchain@v1 - - name: Install gh CLI - run: | - type -p curl >/dev/null || sudo apt install curl -y - curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd 
of=/usr/share/keyrings/githubcli-archive-keyring.gpg \ - && sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \ - && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ - && sudo apt update \ - && sudo apt install gh -y - - name: Build contracts run: forge build - name: Extract contract artifacts id: extract run: | - node scripts/extract-artifacts.js - - - name: Authenticate GitHub CLI - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - echo "$GITHUB_TOKEN" | gh auth login --with-token + # Use the script from the workflow branch (saved earlier) + if [ -f "/tmp/workflow-scripts/extract-artifacts.js" ]; then + echo "Using script from workflow branch" + node /tmp/workflow-scripts/extract-artifacts.js + else + echo "Error: Extraction script not found. Make sure scripts/extract-artifacts.js exists in your workflow branch." 
+            exit 1 + fi - name: Create release if it doesn't exist id: create-release env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | TAG="${{ steps.tag.outputs.tag }}" @@ -104,7 +123,7 @@ jobs: - name: Upload artifacts to release env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | TAG="${{ steps.tag.outputs.tag }}" RELEASE_ACTION="${{ steps.create-release.outputs.release_action }}" @@ -124,9 +143,11 @@ jobs: # Create a list of files to upload find artifacts -type f > /tmp/artifacts_list.txt + # Create temporary directory for renamed files + mkdir -p /tmp/release-assets + upload_count=0 failed_count=0 - skipped_count=0 # Process each file while IFS= read -r artifact_file; do @@ -134,23 +155,27 @@ jobs: contract_name=$(basename $(dirname "$artifact_file")) # Create a descriptive name for the asset + # Copy file to temp location with desired name for upload asset_name="${contract_name}/$(basename "$artifact_file")" + temp_asset_path="/tmp/release-assets/${asset_name}" + + # Create subdirectory structure in temp location + mkdir -p "$(dirname "$temp_asset_path")" + + # Copy file to temp location with desired name + cp "$artifact_file" "$temp_asset_path" echo "Uploading $artifact_file as $asset_name" # Use --clobber to overwrite existing assets if they exist - upload_output=$(gh release upload "$TAG" "$artifact_file" \ + upload_output=$(gh release upload "$TAG" "$temp_asset_path" \ --repo "${{ github.repository }}" \ - --clobber \ - --name "$asset_name" 2>&1) + --clobber 2>&1) upload_exit_code=$? 
if [ $upload_exit_code -eq 0 ]; then echo "✓ Successfully uploaded $asset_name" upload_count=$((upload_count + 1)) - elif echo "$upload_output" | grep -q "already exists"; then - echo "⚠ Asset $asset_name already exists (skipped, use --clobber to overwrite)" - skipped_count=$((skipped_count + 1)) else echo "✗ Failed to upload $asset_name" echo " Error: $upload_output" @@ -158,7 +183,9 @@ jobs: fi done < /tmp/artifacts_list.txt + # Cleanup rm -f /tmp/artifacts_list.txt + rm -rf /tmp/release-assets echo "" echo "==========================================" @@ -166,7 +193,6 @@ jobs: echo " Tag: $TAG" echo " Release action: $RELEASE_ACTION" echo " Files uploaded: $upload_count" - echo " Files skipped (already exist): $skipped_count" echo " Files failed: $failed_count" echo " Contracts processed: $(find artifacts -mindepth 1 -maxdepth 1 -type d | wc -l)" echo "==========================================" @@ -176,8 +202,9 @@ jobs: exit 1 fi - if [ $upload_count -eq 0 ] && [ $skipped_count -gt 0 ]; then - echo "Note: All artifacts already exist on the release. Use --clobber to overwrite." 
+ if [ $upload_count -eq 0 ]; then + echo "Warning: No files were uploaded" + exit 1 fi else echo "Error: No artifacts found to upload" diff --git a/scripts/extract-artifacts.js b/scripts/extract-artifacts.js index 23c5ce14..a8f2f77a 100644 --- a/scripts/extract-artifacts.js +++ b/scripts/extract-artifacts.js @@ -2,6 +2,7 @@ const fs = require('fs'); const path = require('path'); +const crypto = require('crypto'); // List of contracts to extract // Foundry outputs artifacts at: out/{ContractName}.sol/{ContractName}.json @@ -14,22 +15,153 @@ const contracts = [ 'PredicateProtected', 'IPredicateProtected', 'MetaCoin', + 'PredicateHolding', + 'TransparentUpgradeableProxy', +]; + +// Contracts that need Standard JSON Input for block explorer verification +// These are the contracts that get deployed and users interact with +const contractsNeedingVerification = [ + 'PredicateRegistry', + 'MetaCoin', + 'PredicateHolding', + 'TransparentUpgradeableProxy', ]; const artifactsDir = 'artifacts'; const outDir = 'out'; +/** + * Read source file content from the filesystem + * Handles both local sources (src/) and library sources (lib/) + */ +function readSourceFile(sourcePath) { + try { + const content = fs.readFileSync(sourcePath, 'utf8'); + return content; + } catch (error) { + console.log(`⚠ Could not read source file: ${sourcePath}`); + return null; + } +} + +/** + * Generate Standard JSON Input for a contract + * This format is used for block explorer verification (Etherscan, etc.) 
+ */ +function generateStandardJsonInput(artifact, contractName) { + try { + // Parse the raw metadata which contains source dependencies and settings + const metadata = JSON.parse(artifact.rawMetadata); + + // Get the list of source files this contract depends on + const sourcePaths = Object.keys(metadata.sources || {}); + + if (sourcePaths.length === 0) { + console.log(`⚠ No sources found in metadata for ${contractName}`); + return null; + } + + // Read the content of each source file + const sources = {}; + for (const sourcePath of sourcePaths) { + const content = readSourceFile(sourcePath); + if (content) { + sources[sourcePath] = { content }; + } else { + // If we can't read a file, we can't create a valid Standard JSON Input + console.log(`⚠ Skipping Standard JSON for ${contractName}: missing source ${sourcePath}`); + return null; + } + } + + // Build the Standard JSON Input + const standardJsonInput = { + language: 'Solidity', + sources: sources, + settings: { + optimizer: metadata.settings?.optimizer || { enabled: false, runs: 200 }, + evmVersion: metadata.settings?.evmVersion || 'paris', + remappings: metadata.settings?.remappings || [], + metadata: metadata.settings?.metadata || { bytecodeHash: 'ipfs' }, + outputSelection: { + '*': { + '*': ['abi', 'evm.bytecode', 'evm.deployedBytecode', 'metadata'], + }, + }, + }, + }; + + // Add viaIR if it was used (important for bytecode matching) + if (metadata.settings?.viaIR) { + standardJsonInput.settings.viaIR = true; + } + + // Add libraries if any were used + if (metadata.settings?.libraries && Object.keys(metadata.settings.libraries).length > 0) { + standardJsonInput.settings.libraries = metadata.settings.libraries; + } + + return standardJsonInput; + } catch (error) { + console.log(`⚠ Error generating Standard JSON Input for ${contractName}: ${error.message}`); + return null; + } +} + +/** + * Generate compiler settings JSON for a contract + * This is a separate file for easy access to compiler configuration 
+ */ +function generateCompilerSettings(artifact, contractName) { + try { + const metadata = JSON.parse(artifact.rawMetadata); + + return { + compilerVersion: `v${metadata.compiler?.version || 'unknown'}`, + language: metadata.language || 'Solidity', + evmVersion: metadata.settings?.evmVersion || 'default', + optimizer: { + enabled: metadata.settings?.optimizer?.enabled || false, + runs: metadata.settings?.optimizer?.runs || 200, + }, + viaIR: metadata.settings?.viaIR || false, + metadata: { + bytecodeHash: metadata.settings?.metadata?.bytecodeHash || 'ipfs', + }, + compilationTarget: metadata.settings?.compilationTarget || {}, + }; + } catch (error) { + console.log(`⚠ Error generating compiler settings for ${contractName}: ${error.message}`); + return null; + } +} + // Ensure artifacts directory exists if (!fs.existsSync(artifactsDir)) { fs.mkdirSync(artifactsDir, { recursive: true }); } const extractedContracts = []; +let compilerMetadataExtracted = false; // Process each contract for (const contractName of contracts) { - // Foundry flattens structure by contract name - const jsonFile = path.join(outDir, `${contractName}.sol`, `${contractName}.json`); + // Foundry creates artifacts based on directory structure + // For contracts in inheritance examples, check inheritance directory first, then default + let jsonFile; + if (contractName === 'MetaCoin') { + // MetaCoin from inheritance pattern: src/examples/inheritance/MetaCoin.sol + jsonFile = path.join(outDir, 'inheritance', `${contractName}.sol`, `${contractName}.json`); + } else if (contractName === 'PredicateHolding') { + // PredicateHolding may be in default location or inheritance directory + const defaultPath = path.join(outDir, `${contractName}.sol`, `${contractName}.json`); + const inheritancePath = path.join(outDir, 'inheritance', `${contractName}.sol`, `${contractName}.json`); + jsonFile = fs.existsSync(defaultPath) ? 
defaultPath : inheritancePath; + } else { + // Default: Foundry flattens structure by contract name + jsonFile = path.join(outDir, `${contractName}.sol`, `${contractName}.json`); + } // Check if the artifact file exists if (!fs.existsSync(jsonFile)) { @@ -44,6 +176,50 @@ for (const contractName of contracts) { const artifactContent = fs.readFileSync(jsonFile, 'utf8'); const artifact = JSON.parse(artifactContent); + // Extract compiler metadata (only once, from first contract) + if (!compilerMetadataExtracted && artifact.metadata) { + try { + const metadata = JSON.parse(artifact.metadata); + const compilerInfo = { + compiler: { + version: metadata.compiler?.version || 'unknown', + }, + settings: { + optimizer: { + enabled: metadata.settings?.optimizer?.enabled || false, + runs: metadata.settings?.optimizer?.runs || 200, + }, + evmVersion: metadata.settings?.evmVersion || 'default', + viaIR: metadata.settings?.viaIR || false, + }, + }; + + const compilerMetadataFile = path.join(artifactsDir, 'compiler-metadata.json'); + fs.writeFileSync(compilerMetadataFile, JSON.stringify(compilerInfo, null, 2)); + console.log(`✓ Extracted compiler metadata`); + compilerMetadataExtracted = true; + } catch (error) { + console.log(`⚠ Could not parse compiler metadata: ${error.message}`); + } + } + + // Extract source file information from metadata for verification + let sourceFile = 'unknown'; + let compilationTarget = null; + if (artifact.metadata) { + try { + const metadata = JSON.parse(artifact.metadata); + compilationTarget = metadata.settings?.compilationTarget || {}; + // Get the source file from compilation target (most reliable) + const targetKeys = Object.keys(compilationTarget); + if (targetKeys.length > 0) { + sourceFile = targetKeys[0]; // First key is the source file path + } + } catch (error) { + // Ignore metadata parsing errors for source info + } + } + // Extract ABI and bytecode const abi = artifact.abi; const bytecode = artifact.bytecode?.object || artifact.bytecode 
|| ''; @@ -55,6 +231,35 @@ for (const contractName of contracts) { fs.mkdirSync(contractArtifactDir, { recursive: true }); } + // Save source metadata for this contract + const sourceMetadata = { + contractName: contractName, + artifactPath: jsonFile, + sourceFile: sourceFile, + compilationTarget: compilationTarget, + }; + const sourceMetadataFile = path.join(contractArtifactDir, `${contractName}.source.json`); + fs.writeFileSync(sourceMetadataFile, JSON.stringify(sourceMetadata, null, 2)); + + // Generate Standard JSON Input for contracts that need verification + if (contractsNeedingVerification.includes(contractName)) { + const standardJsonInput = generateStandardJsonInput(artifact, contractName); + if (standardJsonInput) { + const standardJsonFile = path.join(contractArtifactDir, `${contractName}.standard-json.json`); + fs.writeFileSync(standardJsonFile, JSON.stringify(standardJsonInput, null, 2)); + const sourceCount = Object.keys(standardJsonInput.sources).length; + console.log(`✓ Generated Standard JSON Input for ${contractName} (${sourceCount} sources)`); + } + + // Generate separate compiler settings file for easy access + const compilerSettings = generateCompilerSettings(artifact, contractName); + if (compilerSettings) { + const compilerSettingsFile = path.join(contractArtifactDir, `${contractName}.compiler-settings.json`); + fs.writeFileSync(compilerSettingsFile, JSON.stringify(compilerSettings, null, 2)); + console.log(`✓ Generated compiler settings for ${contractName}`); + } + } + // Always save ABI (even for interfaces) if (abi && Array.isArray(abi) && abi.length > 0) { const abiFile = path.join(contractArtifactDir, `${contractName}.abi.json`); @@ -66,7 +271,15 @@ for (const contractName of contracts) { if (bytecode && bytecode !== 'null' && bytecode !== '0x' && bytecode.length > 10) { const bytecodeFile = path.join(contractArtifactDir, `${contractName}.bytecode`); fs.writeFileSync(bytecodeFile, bytecode); - console.log(`✓ Extracted bytecode for 
${contractName}`); + + // Calculate bytecode hash for verification (first 16 chars of sha256) + const bytecodeHash = crypto.createHash('sha256').update(bytecode).digest('hex').substring(0, 16); + + console.log(`✓ Extracted bytecode for ${contractName} (length: ${bytecode.length}, hash: ${bytecodeHash})`); + console.log(` Source: ${sourceFile}`); + if (compilationTarget && Object.keys(compilationTarget).length > 0) { + console.log(` Compilation target: ${JSON.stringify(compilationTarget)}`); + } // Save deployed bytecode if it exists if ( diff --git a/src/examples/README.md b/src/examples/README.md index 6367740d..199eed3d 100644 --- a/src/examples/README.md +++ b/src/examples/README.md @@ -74,6 +74,7 @@ The Inheritance pattern directly extends the Predicate client functionality thro **Key components:** - `MetaCoin.sol`: Inherits from `PredicateClient` to gain validation capabilities +- `PredicateHolding.sol`: Minimal example that holds Predicate configuration without business logic **How it works:** 1. The contract inherits from `PredicateClient` @@ -96,4 +97,4 @@ The Inheritance pattern directly extends the Predicate client functionality thro - **Use the Proxy pattern** when you need a clean separation of concerns and potentially upgradable validation logic. This provides maximum flexibility. - ~~**Wrapper pattern**~~ - **Deprecated in v2**. Use Inheritance or Proxy patterns instead. -Each pattern can be adapted to suit your specific needs and security requirements. \ No newline at end of file +Each pattern can be adapted to suit your specific needs and security requirements. 
diff --git a/src/examples/inheritance/PredicateHolding.sol b/src/examples/inheritance/PredicateHolding.sol new file mode 100644 index 00000000..521363cc --- /dev/null +++ b/src/examples/inheritance/PredicateHolding.sol @@ -0,0 +1,52 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity 0.8.28; + +import {Ownable} from "@openzeppelin/contracts/access/Ownable.sol"; + +import {PredicateClient} from "../../mixins/PredicateClient.sol"; + +/** + * @title PredicateHolding + * @author Predicate Labs, Inc (https://predicate.io) + * @notice Minimal contract that integrates Predicate attestation validation via PredicateClient. + * @dev This contract holds Predicate configuration (registry + policy ID) but does not + * implement any business logic. It can be used as a simple, ownable Predicate client + * for storing policy IDs. + */ +contract PredicateHolding is PredicateClient, Ownable { + /** + * @notice Initializes ownership and Predicate client configuration + * @param _owner Address that will own this contract and control configuration + * @param _registry Address of the PredicateRegistry contract + * @param _policyID Initial policy identifier for this contract + */ + constructor( + address _owner, + address _registry, + string memory _policyID + ) Ownable(_owner) { + _initPredicateClient(_registry, _policyID); + } + + /** + * @notice Updates the policy ID for this contract + * @dev Restricted to the contract owner + * @param _policyID The new policy identifier to set + */ + function setPolicyID( + string memory _policyID + ) external onlyOwner { + _setPolicyID(_policyID); + } + + /** + * @notice Updates the PredicateRegistry address for this contract + * @dev Restricted to the contract owner + * @param _registry The new PredicateRegistry contract address + */ + function setRegistry( + address _registry + ) public onlyOwner { + _setRegistry(_registry); + } +}